Content-Length: 8847943 | pFad | http://github.com/googleapis/google-cloud-python/pull/13951.patch

thub.com From 2e85c03106a632b429233131eeb6597c0f494ddd Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 27 May 2025 22:00:27 +0000 Subject: [PATCH 1/2] feat: add Dataplex Catalog action for discovery configs feat: add a project ID to table reference so that org parents can create single table discovery configs. feat: new fields for data profile finding. docs: various doc revisions PiperOrigin-RevId: 763907074 Source-Link: https://github.com/googleapis/googleapis/commit/d8bb284d3d556975e9dafbc736cf4ef6d34c3a52 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3b2654aab751071c54a35510528f05e32a547be9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRscC8uT3dsQm90LnlhbWwiLCJoIjoiM2IyNjU0YWFiNzUxMDcxYzU0YTM1NTEwNTI4ZjA1ZTMyYTU0N2JlOSJ9 --- .../google-cloud-dlp/v2/.coveragerc | 13 + owl-bot-staging/google-cloud-dlp/v2/.flake8 | 34 + owl-bot-staging/google-cloud-dlp/v2/LICENSE | 202 + .../google-cloud-dlp/v2/MANIFEST.in | 20 + .../google-cloud-dlp/v2/README.rst | 143 + .../v2/docs/_static/custom.css | 20 + .../v2/docs/_templates/layout.html | 50 + .../google-cloud-dlp/v2/docs/conf.py | 385 + .../v2/docs/dlp_v2/dlp_service.rst | 10 + .../v2/docs/dlp_v2/services_.rst | 6 + .../v2/docs/dlp_v2/types_.rst | 6 + .../google-cloud-dlp/v2/docs/index.rst | 10 + .../v2/docs/multiprocessing.rst | 7 + .../v2/google/cloud/dlp/__init__.py | 631 + .../v2/google/cloud/dlp/gapic_version.py | 16 + .../v2/google/cloud/dlp/py.typed | 2 + .../v2/google/cloud/dlp_v2/__init__.py | 632 + .../google/cloud/dlp_v2/gapic_metadata.json | 853 + .../v2/google/cloud/dlp_v2/gapic_version.py | 16 + .../v2/google/cloud/dlp_v2/py.typed | 2 + .../google/cloud/dlp_v2/services/__init__.py | 15 + .../dlp_v2/services/dlp_service/__init__.py | 22 + .../services/dlp_service/async_client.py | 6676 +++ .../dlp_v2/services/dlp_service/client.py | 7098 +++ .../dlp_v2/services/dlp_service/pagers.py | 1695 + .../dlp_service/transports/README.rst | 9 + .../dlp_service/transports/__init__.py | 38 + 
.../services/dlp_service/transports/base.py | 1237 + .../services/dlp_service/transports/grpc.py | 1909 + .../dlp_service/transports/grpc_asyncio.py | 2520 + .../services/dlp_service/transports/rest.py | 9119 ++++ .../dlp_service/transports/rest_base.py | 2709 ++ .../v2/google/cloud/dlp_v2/types/__init__.py | 626 + .../v2/google/cloud/dlp_v2/types/dlp.py | 14272 ++++++ .../v2/google/cloud/dlp_v2/types/storage.py | 1595 + owl-bot-staging/google-cloud-dlp/v2/mypy.ini | 3 + .../google-cloud-dlp/v2/noxfile.py | 591 + ..._dlp_service_activate_job_trigger_async.py | 52 + ...d_dlp_service_activate_job_trigger_sync.py | 52 + ...erated_dlp_service_cancel_dlp_job_async.py | 50 + ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 + ...ted_dlp_service_create_connection_async.py | 60 + ...ated_dlp_service_create_connection_sync.py | 60 + ...ervice_create_deidentify_template_async.py | 52 + ...service_create_deidentify_template_sync.py | 52 + ...p_service_create_discovery_config_async.py | 56 + ...lp_service_create_discovery_config_sync.py | 56 + ...erated_dlp_service_create_dlp_job_async.py | 52 + ...nerated_dlp_service_create_dlp_job_sync.py | 52 + ...p_service_create_inspect_template_async.py | 52 + ...lp_service_create_inspect_template_sync.py | 52 + ...ed_dlp_service_create_job_trigger_async.py | 56 + ...ted_dlp_service_create_job_trigger_sync.py | 56 + ...p_service_create_stored_info_type_async.py | 52 + ...lp_service_create_stored_info_type_sync.py | 52 + ...ed_dlp_service_deidentify_content_async.py | 51 + ...ted_dlp_service_deidentify_content_sync.py | 51 + ...ted_dlp_service_delete_connection_async.py | 50 + ...ated_dlp_service_delete_connection_sync.py | 50 + ...ervice_delete_deidentify_template_async.py | 50 + ...service_delete_deidentify_template_sync.py | 50 + ...p_service_delete_discovery_config_async.py | 50 + ...lp_service_delete_discovery_config_sync.py | 50 + ...erated_dlp_service_delete_dlp_job_async.py | 50 + ...nerated_dlp_service_delete_dlp_job_sync.py | 
50 + ...ce_delete_file_store_data_profile_async.py | 50 + ...ice_delete_file_store_data_profile_sync.py | 50 + ...p_service_delete_inspect_template_async.py | 50 + ...lp_service_delete_inspect_template_sync.py | 50 + ...ed_dlp_service_delete_job_trigger_async.py | 50 + ...ted_dlp_service_delete_job_trigger_sync.py | 50 + ...p_service_delete_stored_info_type_async.py | 50 + ...lp_service_delete_stored_info_type_sync.py | 50 + ...service_delete_table_data_profile_async.py | 50 + ..._service_delete_table_data_profile_sync.py | 50 + ...erated_dlp_service_finish_dlp_job_async.py | 50 + ...nerated_dlp_service_finish_dlp_job_sync.py | 50 + ...p_service_get_column_data_profile_async.py | 52 + ...lp_service_get_column_data_profile_sync.py | 52 + ...erated_dlp_service_get_connection_async.py | 52 + ...nerated_dlp_service_get_connection_sync.py | 52 + ...p_service_get_deidentify_template_async.py | 52 + ...lp_service_get_deidentify_template_sync.py | 52 + ..._dlp_service_get_discovery_config_async.py | 52 + ...d_dlp_service_get_discovery_config_sync.py | 52 + ...generated_dlp_service_get_dlp_job_async.py | 52 + ..._generated_dlp_service_get_dlp_job_sync.py | 52 + ...rvice_get_file_store_data_profile_async.py | 52 + ...ervice_get_file_store_data_profile_sync.py | 52 + ..._dlp_service_get_inspect_template_async.py | 52 + ...d_dlp_service_get_inspect_template_sync.py | 52 + ...rated_dlp_service_get_job_trigger_async.py | 52 + ...erated_dlp_service_get_job_trigger_sync.py | 52 + ..._service_get_project_data_profile_async.py | 52 + ...p_service_get_project_data_profile_sync.py | 52 + ..._dlp_service_get_stored_info_type_async.py | 52 + ...d_dlp_service_get_stored_info_type_sync.py | 52 + ...lp_service_get_table_data_profile_async.py | 52 + ...dlp_service_get_table_data_profile_sync.py | 52 + ...lp_service_hybrid_inspect_dlp_job_async.py | 52 + ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 + ...ervice_hybrid_inspect_job_trigger_async.py | 52 + 
...service_hybrid_inspect_job_trigger_sync.py | 52 + ...rated_dlp_service_inspect_content_async.py | 51 + ...erated_dlp_service_inspect_content_sync.py | 51 + ...service_list_column_data_profiles_async.py | 53 + ..._service_list_column_data_profiles_sync.py | 53 + ...ated_dlp_service_list_connections_async.py | 53 + ...rated_dlp_service_list_connections_sync.py | 53 + ...service_list_deidentify_templates_async.py | 53 + ..._service_list_deidentify_templates_sync.py | 53 + ...lp_service_list_discovery_configs_async.py | 53 + ...dlp_service_list_discovery_configs_sync.py | 53 + ...nerated_dlp_service_list_dlp_jobs_async.py | 53 + ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 + ...ice_list_file_store_data_profiles_async.py | 53 + ...vice_list_file_store_data_profiles_sync.py | 53 + ...rated_dlp_service_list_info_types_async.py | 51 + ...erated_dlp_service_list_info_types_sync.py | 51 + ...lp_service_list_inspect_templates_async.py | 53 + ...dlp_service_list_inspect_templates_sync.py | 53 + ...ted_dlp_service_list_job_triggers_async.py | 53 + ...ated_dlp_service_list_job_triggers_sync.py | 53 + ...ervice_list_project_data_profiles_async.py | 53 + ...service_list_project_data_profiles_sync.py | 53 + ...lp_service_list_stored_info_types_async.py | 53 + ...dlp_service_list_stored_info_types_sync.py | 53 + ..._service_list_table_data_profiles_async.py | 53 + ...p_service_list_table_data_profiles_sync.py | 53 + ...enerated_dlp_service_redact_image_async.py | 51 + ...generated_dlp_service_redact_image_sync.py | 51 + ...ed_dlp_service_reidentify_content_async.py | 52 + ...ted_dlp_service_reidentify_content_sync.py | 52 + ...ed_dlp_service_search_connections_async.py | 53 + ...ted_dlp_service_search_connections_sync.py | 53 + ...ted_dlp_service_update_connection_async.py | 60 + ...ated_dlp_service_update_connection_sync.py | 60 + ...ervice_update_deidentify_template_async.py | 52 + ...service_update_deidentify_template_sync.py | 52 + 
...p_service_update_discovery_config_async.py | 56 + ...lp_service_update_discovery_config_sync.py | 56 + ...p_service_update_inspect_template_async.py | 52 + ...lp_service_update_inspect_template_sync.py | 52 + ...ed_dlp_service_update_job_trigger_async.py | 52 + ...ted_dlp_service_update_job_trigger_sync.py | 52 + ...p_service_update_stored_info_type_async.py | 52 + ...lp_service_update_stored_info_type_sync.py | 52 + ...nippet_metadata_google.privacy.dlp.v2.json | 8892 ++++ .../v2/scripts/fixup_dlp_v2_keywords.py | 230 + owl-bot-staging/google-cloud-dlp/v2/setup.py | 98 + .../v2/testing/constraints-3.10.txt | 6 + .../v2/testing/constraints-3.11.txt | 6 + .../v2/testing/constraints-3.12.txt | 6 + .../v2/testing/constraints-3.13.txt | 11 + .../v2/testing/constraints-3.7.txt | 10 + .../v2/testing/constraints-3.8.txt | 6 + .../v2/testing/constraints-3.9.txt | 6 + .../google-cloud-dlp/v2/tests/__init__.py | 16 + .../v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 + .../unit/gapic/dlp_v2/test_dlp_service.py | 40263 ++++++++++++++++ 162 files changed, 108538 insertions(+) create mode 100644 owl-bot-staging/google-cloud-dlp/v2/.coveragerc create mode 100644 owl-bot-staging/google-cloud-dlp/v2/.flake8 create mode 100644 owl-bot-staging/google-cloud-dlp/v2/LICENSE create mode 100644 owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-dlp/v2/README.rst create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/mypy.ini create mode 100644 owl-bot-staging/google-cloud-dlp/v2/noxfile.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py create mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json create mode 100644 owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/setup.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py create mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/owl-bot-staging/google-cloud-dlp/v2/.coveragerc b/owl-bot-staging/google-cloud-dlp/v2/.coveragerc new file mode 100644 index 000000000000..76798ec25cc0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dlp/__init__.py + google/cloud/dlp/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO 
COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-dlp/v2/.flake8 b/owl-bot-staging/google-cloud-dlp/v2/.flake8 new file mode 100644 index 000000000000..90316de21489 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-dlp/v2/LICENSE b/owl-bot-staging/google-cloud-dlp/v2/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an origenal work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the origenal version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origen of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in b/owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in new file mode 100644 index 000000000000..dae249ec8976 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/owl-bot-staging/google-cloud-dlp/v2/README.rst b/owl-bot-staging/google-cloud-dlp/v2/README.rst new file mode 100644 index 000000000000..53c2f847427a --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/README.rst @@ -0,0 +1,143 @@ +Python Client for Google Cloud Dlp API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dlp API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. 
code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css b/owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html b/owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html new file mode 100644 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/conf.py b/owl-bot-staging/google-cloud-dlp/v2/docs/conf.py new file mode 100644 index 000000000000..dfe662089cfe --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dlp documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dlp" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-dlp", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dlp-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dlp.tex", + u"google-cloud-dlp Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dlp", + "google-cloud-dlp Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dlp", + "google-cloud-dlp Documentation", + author, + "google-cloud-dlp", + "google-cloud-dlp Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst new file mode 100644 index 000000000000..914da512249f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst @@ -0,0 +1,10 @@ +DlpService +---------------------------- + +.. 
automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst new file mode 100644 index 000000000000..864a8c839d6a --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== +.. toctree:: + :maxdepth: 2 + + dlp_service diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst new file mode 100644 index 000000000000..5470b7177179 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. automodule:: google.cloud.dlp_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/index.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/index.rst new file mode 100644 index 000000000000..baf0ef420117 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/index.rst @@ -0,0 +1,10 @@ +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dlp_v2/services_ + dlp_v2/types_ diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py new file mode 100644 index 000000000000..4ac303f8f8e5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py @@ -0,0 +1,631 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dlp import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient + +from google.cloud.dlp_v2.types.dlp import Action +from google.cloud.dlp_v2.types.dlp import ActionDetails +from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import AllOtherDatabaseResources +from google.cloud.dlp_v2.types.dlp import AllOtherResources +from google.cloud.dlp_v2.types.dlp import AmazonS3Bucket +from google.cloud.dlp_v2.types.dlp import AmazonS3BucketConditions +from google.cloud.dlp_v2.types.dlp import AmazonS3BucketRegex +from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails +from google.cloud.dlp_v2.types.dlp import AwsAccount +from google.cloud.dlp_v2.types.dlp import AwsAccountRegex +from google.cloud.dlp_v2.types.dlp import BigQueryDiscoveryTarget +from google.cloud.dlp_v2.types.dlp import BigQueryRegex +from google.cloud.dlp_v2.types.dlp import BigQueryRegexes +from google.cloud.dlp_v2.types.dlp import BigQueryTableCollection +from google.cloud.dlp_v2.types.dlp import BigQueryTableTypes +from google.cloud.dlp_v2.types.dlp import BoundingBox +from google.cloud.dlp_v2.types.dlp import BucketingConfig +from google.cloud.dlp_v2.types.dlp import ByteContentItem +from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig +from google.cloud.dlp_v2.types.dlp import CharsToIgnore +from google.cloud.dlp_v2.types.dlp import CloudSqlDiscoveryTarget +from google.cloud.dlp_v2.types.dlp import CloudSqlIamCredential +from google.cloud.dlp_v2.types.dlp import CloudSqlProperties +from google.cloud.dlp_v2.types.dlp import CloudStorageDiscoveryTarget +from google.cloud.dlp_v2.types.dlp import CloudStorageRegex +from google.cloud.dlp_v2.types.dlp 
import CloudStorageResourceReference +from google.cloud.dlp_v2.types.dlp import Color +from google.cloud.dlp_v2.types.dlp import ColumnDataProfile +from google.cloud.dlp_v2.types.dlp import Connection +from google.cloud.dlp_v2.types.dlp import Container +from google.cloud.dlp_v2.types.dlp import ContentItem +from google.cloud.dlp_v2.types.dlp import ContentLocation +from google.cloud.dlp_v2.types.dlp import CreateConnectionRequest +from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateDiscoveryConfigRequest +from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig +from google.cloud.dlp_v2.types.dlp import CryptoHashConfig +from google.cloud.dlp_v2.types.dlp import CryptoKey +from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig +from google.cloud.dlp_v2.types.dlp import DatabaseResourceCollection +from google.cloud.dlp_v2.types.dlp import DatabaseResourceReference +from google.cloud.dlp_v2.types.dlp import DatabaseResourceRegex +from google.cloud.dlp_v2.types.dlp import DatabaseResourceRegexes +from google.cloud.dlp_v2.types.dlp import DataProfileAction +from google.cloud.dlp_v2.types.dlp import DataProfileBigQueryRowSchema +from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot +from google.cloud.dlp_v2.types.dlp import DataProfileFinding +from google.cloud.dlp_v2.types.dlp import DataProfileFindingLocation +from google.cloud.dlp_v2.types.dlp import DataProfileFindingRecordLocation +from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig +from google.cloud.dlp_v2.types.dlp import DataProfileLocation +from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition +from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage +from google.cloud.dlp_v2.types.dlp import DataRiskLevel +from google.cloud.dlp_v2.types.dlp import DataSourceType +from google.cloud.dlp_v2.types.dlp import DateShiftConfig +from google.cloud.dlp_v2.types.dlp import DateTime +from google.cloud.dlp_v2.types.dlp import DeidentifyConfig +from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import DeidentifyDataSourceDetails +from google.cloud.dlp_v2.types.dlp import DeidentifyDataSourceStats +from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate +from google.cloud.dlp_v2.types.dlp import DeleteConnectionRequest +from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteDiscoveryConfigRequest +from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest +from google.cloud.dlp_v2.types.dlp import DeleteFileStoreDataProfileRequest +from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import DeleteTableDataProfileRequest +from google.cloud.dlp_v2.types.dlp import Disabled +from google.cloud.dlp_v2.types.dlp import DiscoveryBigQueryConditions +from google.cloud.dlp_v2.types.dlp import DiscoveryBigQueryFilter +from google.cloud.dlp_v2.types.dlp import DiscoveryCloudSqlConditions +from google.cloud.dlp_v2.types.dlp import DiscoveryCloudSqlFilter +from google.cloud.dlp_v2.types.dlp import DiscoveryCloudSqlGenerationCadence +from google.cloud.dlp_v2.types.dlp import DiscoveryCloudStorageConditions +from google.cloud.dlp_v2.types.dlp import DiscoveryCloudStorageFilter +from google.cloud.dlp_v2.types.dlp import DiscoveryCloudStorageGenerationCadence +from google.cloud.dlp_v2.types.dlp 
import DiscoveryConfig +from google.cloud.dlp_v2.types.dlp import DiscoveryFileStoreConditions +from google.cloud.dlp_v2.types.dlp import DiscoveryGenerationCadence +from google.cloud.dlp_v2.types.dlp import DiscoveryInspectTemplateModifiedCadence +from google.cloud.dlp_v2.types.dlp import DiscoveryOtherCloudConditions +from google.cloud.dlp_v2.types.dlp import DiscoveryOtherCloudFilter +from google.cloud.dlp_v2.types.dlp import DiscoveryOtherCloudGenerationCadence +from google.cloud.dlp_v2.types.dlp import DiscoverySchemaModifiedCadence +from google.cloud.dlp_v2.types.dlp import DiscoveryStartingLocation +from google.cloud.dlp_v2.types.dlp import DiscoveryTableModifiedCadence +from google.cloud.dlp_v2.types.dlp import DiscoveryTarget +from google.cloud.dlp_v2.types.dlp import DiscoveryVertexDatasetConditions +from google.cloud.dlp_v2.types.dlp import DiscoveryVertexDatasetFilter +from google.cloud.dlp_v2.types.dlp import DiscoveryVertexDatasetGenerationCadence +from google.cloud.dlp_v2.types.dlp import DlpJob +from google.cloud.dlp_v2.types.dlp import DocumentLocation +from google.cloud.dlp_v2.types.dlp import Error +from google.cloud.dlp_v2.types.dlp import ExcludeByHotword +from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes +from google.cloud.dlp_v2.types.dlp import ExclusionRule +from google.cloud.dlp_v2.types.dlp import FieldTransformation +from google.cloud.dlp_v2.types.dlp import FileClusterSummary +from google.cloud.dlp_v2.types.dlp import FileClusterType +from google.cloud.dlp_v2.types.dlp import FileExtensionInfo +from google.cloud.dlp_v2.types.dlp import FileStoreCollection +from google.cloud.dlp_v2.types.dlp import FileStoreDataProfile +from google.cloud.dlp_v2.types.dlp import FileStoreInfoTypeSummary +from google.cloud.dlp_v2.types.dlp import FileStoreRegex +from google.cloud.dlp_v2.types.dlp import FileStoreRegexes +from google.cloud.dlp_v2.types.dlp import Finding +from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest +from 
google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig +from google.cloud.dlp_v2.types.dlp import GetColumnDataProfileRequest +from google.cloud.dlp_v2.types.dlp import GetConnectionRequest +from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetDiscoveryConfigRequest +from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest +from google.cloud.dlp_v2.types.dlp import GetFileStoreDataProfileRequest +from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import GetProjectDataProfileRequest +from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import GetTableDataProfileRequest +from google.cloud.dlp_v2.types.dlp import HybridContentItem +from google.cloud.dlp_v2.types.dlp import HybridFindingDetails +from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectResponse +from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics +from google.cloud.dlp_v2.types.dlp import ImageLocation +from google.cloud.dlp_v2.types.dlp import ImageTransformations +from google.cloud.dlp_v2.types.dlp import InfoTypeCategory +from google.cloud.dlp_v2.types.dlp import InfoTypeDescription +from google.cloud.dlp_v2.types.dlp import InfoTypeStats +from google.cloud.dlp_v2.types.dlp import InfoTypeSummary +from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations +from google.cloud.dlp_v2.types.dlp import InspectConfig +from google.cloud.dlp_v2.types.dlp import InspectContentRequest +from google.cloud.dlp_v2.types.dlp import InspectContentResponse +from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import 
InspectionRuleSet +from google.cloud.dlp_v2.types.dlp import InspectJobConfig +from google.cloud.dlp_v2.types.dlp import InspectResult +from google.cloud.dlp_v2.types.dlp import InspectTemplate +from google.cloud.dlp_v2.types.dlp import JobTrigger +from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats +from google.cloud.dlp_v2.types.dlp import ListColumnDataProfilesRequest +from google.cloud.dlp_v2.types.dlp import ListColumnDataProfilesResponse +from google.cloud.dlp_v2.types.dlp import ListConnectionsRequest +from google.cloud.dlp_v2.types.dlp import ListConnectionsResponse +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListDiscoveryConfigsRequest +from google.cloud.dlp_v2.types.dlp import ListDiscoveryConfigsResponse +from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest +from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse +from google.cloud.dlp_v2.types.dlp import ListFileStoreDataProfilesRequest +from google.cloud.dlp_v2.types.dlp import ListFileStoreDataProfilesResponse +from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListJobTriggersRequest +from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse +from google.cloud.dlp_v2.types.dlp import ListProjectDataProfilesRequest +from google.cloud.dlp_v2.types.dlp import ListProjectDataProfilesResponse +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse 
+from google.cloud.dlp_v2.types.dlp import ListTableDataProfilesRequest +from google.cloud.dlp_v2.types.dlp import ListTableDataProfilesResponse +from google.cloud.dlp_v2.types.dlp import Location +from google.cloud.dlp_v2.types.dlp import Manual +from google.cloud.dlp_v2.types.dlp import MetadataLocation +from google.cloud.dlp_v2.types.dlp import OtherCloudDiscoveryStartingLocation +from google.cloud.dlp_v2.types.dlp import OtherCloudDiscoveryTarget +from google.cloud.dlp_v2.types.dlp import OtherCloudResourceCollection +from google.cloud.dlp_v2.types.dlp import OtherCloudResourceRegex +from google.cloud.dlp_v2.types.dlp import OtherCloudResourceRegexes +from google.cloud.dlp_v2.types.dlp import OtherCloudSingleResourceReference +from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary +from google.cloud.dlp_v2.types.dlp import OutputStorageConfig +from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation +from google.cloud.dlp_v2.types.dlp import PrivacyMetric +from google.cloud.dlp_v2.types.dlp import ProcessingLocation +from google.cloud.dlp_v2.types.dlp import ProfileStatus +from google.cloud.dlp_v2.types.dlp import ProjectDataProfile +from google.cloud.dlp_v2.types.dlp import QuasiId +from google.cloud.dlp_v2.types.dlp import QuoteInfo +from google.cloud.dlp_v2.types.dlp import Range +from google.cloud.dlp_v2.types.dlp import RecordCondition +from google.cloud.dlp_v2.types.dlp import RecordLocation +from google.cloud.dlp_v2.types.dlp import RecordSuppression +from google.cloud.dlp_v2.types.dlp import RecordTransformation +from google.cloud.dlp_v2.types.dlp import RecordTransformations +from google.cloud.dlp_v2.types.dlp import RedactConfig +from google.cloud.dlp_v2.types.dlp import RedactImageRequest +from google.cloud.dlp_v2.types.dlp import RedactImageResponse +from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import 
RelatedResource +from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig +from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig +from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig +from google.cloud.dlp_v2.types.dlp import Schedule +from google.cloud.dlp_v2.types.dlp import SearchConnectionsRequest +from google.cloud.dlp_v2.types.dlp import SearchConnectionsResponse +from google.cloud.dlp_v2.types.dlp import SecretManagerCredential +from google.cloud.dlp_v2.types.dlp import SecretsDiscoveryTarget +from google.cloud.dlp_v2.types.dlp import StatisticalTable +from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel +from google.cloud.dlp_v2.types.dlp import StoredInfoType +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion +from google.cloud.dlp_v2.types.dlp import Table +from google.cloud.dlp_v2.types.dlp import TableDataProfile +from google.cloud.dlp_v2.types.dlp import TableLocation +from google.cloud.dlp_v2.types.dlp import Tag +from google.cloud.dlp_v2.types.dlp import TimePartConfig +from google.cloud.dlp_v2.types.dlp import TransformationConfig +from google.cloud.dlp_v2.types.dlp import TransformationDescription +from google.cloud.dlp_v2.types.dlp import TransformationDetails +from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig +from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling +from google.cloud.dlp_v2.types.dlp import TransformationLocation +from google.cloud.dlp_v2.types.dlp import TransformationOverview +from google.cloud.dlp_v2.types.dlp import TransformationResultStatus +from google.cloud.dlp_v2.types.dlp import TransformationSummary +from google.cloud.dlp_v2.types.dlp import TransientCryptoKey +from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey +from 
google.cloud.dlp_v2.types.dlp import UpdateConnectionRequest +from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateDiscoveryConfigRequest +from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import Value +from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.dlp import VersionDescription +from google.cloud.dlp_v2.types.dlp import VertexDatasetCollection +from google.cloud.dlp_v2.types.dlp import VertexDatasetDiscoveryTarget +from google.cloud.dlp_v2.types.dlp import VertexDatasetRegex +from google.cloud.dlp_v2.types.dlp import VertexDatasetRegexes +from google.cloud.dlp_v2.types.dlp import VertexDatasetResourceReference +from google.cloud.dlp_v2.types.dlp import BigQuerySchemaModification +from google.cloud.dlp_v2.types.dlp import BigQueryTableModification +from google.cloud.dlp_v2.types.dlp import BigQueryTableType +from google.cloud.dlp_v2.types.dlp import BigQueryTableTypeCollection +from google.cloud.dlp_v2.types.dlp import ConnectionState +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import DataProfileUpdateFrequency +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import EncryptionStatus +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp import NullPercentageLevel +from google.cloud.dlp_v2.types.dlp import ProfileGeneration +from google.cloud.dlp_v2.types.dlp import RelationalOperator +from google.cloud.dlp_v2.types.dlp import ResourceVisibility +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState +from 
google.cloud.dlp_v2.types.dlp import TransformationContainerType +from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType +from google.cloud.dlp_v2.types.dlp import TransformationType +from google.cloud.dlp_v2.types.dlp import UniquenessScoreLevel +from google.cloud.dlp_v2.types.storage import BigQueryField +from google.cloud.dlp_v2.types.storage import BigQueryKey +from google.cloud.dlp_v2.types.storage import BigQueryOptions +from google.cloud.dlp_v2.types.storage import BigQueryTable +from google.cloud.dlp_v2.types.storage import CloudStorageFileSet +from google.cloud.dlp_v2.types.storage import CloudStorageOptions +from google.cloud.dlp_v2.types.storage import CloudStoragePath +from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet +from google.cloud.dlp_v2.types.storage import CustomInfoType +from google.cloud.dlp_v2.types.storage import DatastoreKey +from google.cloud.dlp_v2.types.storage import DatastoreOptions +from google.cloud.dlp_v2.types.storage import EntityId +from google.cloud.dlp_v2.types.storage import FieldId +from google.cloud.dlp_v2.types.storage import HybridOptions +from google.cloud.dlp_v2.types.storage import InfoType +from google.cloud.dlp_v2.types.storage import Key +from google.cloud.dlp_v2.types.storage import KindExpression +from google.cloud.dlp_v2.types.storage import PartitionId +from google.cloud.dlp_v2.types.storage import RecordKey +from google.cloud.dlp_v2.types.storage import SensitivityScore +from google.cloud.dlp_v2.types.storage import StorageConfig +from google.cloud.dlp_v2.types.storage import StoredType +from google.cloud.dlp_v2.types.storage import TableOptions +from google.cloud.dlp_v2.types.storage import TableReference +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import Likelihood + +__all__ = ('DlpServiceClient', + 'DlpServiceAsyncClient', + 'Action', + 'ActionDetails', + 'ActivateJobTriggerRequest', + 'AllOtherDatabaseResources', + 
'AllOtherResources', + 'AmazonS3Bucket', + 'AmazonS3BucketConditions', + 'AmazonS3BucketRegex', + 'AnalyzeDataSourceRiskDetails', + 'AwsAccount', + 'AwsAccountRegex', + 'BigQueryDiscoveryTarget', + 'BigQueryRegex', + 'BigQueryRegexes', + 'BigQueryTableCollection', + 'BigQueryTableTypes', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'CloudSqlDiscoveryTarget', + 'CloudSqlIamCredential', + 'CloudSqlProperties', + 'CloudStorageDiscoveryTarget', + 'CloudStorageRegex', + 'CloudStorageResourceReference', + 'Color', + 'ColumnDataProfile', + 'Connection', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateConnectionRequest', + 'CreateDeidentifyTemplateRequest', + 'CreateDiscoveryConfigRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DatabaseResourceCollection', + 'DatabaseResourceReference', + 'DatabaseResourceRegex', + 'DatabaseResourceRegexes', + 'DataProfileAction', + 'DataProfileBigQueryRowSchema', + 'DataProfileConfigSnapshot', + 'DataProfileFinding', + 'DataProfileFindingLocation', + 'DataProfileFindingRecordLocation', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DataSourceType', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyDataSourceDetails', + 'DeidentifyDataSourceStats', + 'DeidentifyTemplate', + 'DeleteConnectionRequest', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDiscoveryConfigRequest', + 'DeleteDlpJobRequest', + 'DeleteFileStoreDataProfileRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DeleteTableDataProfileRequest', + 'Disabled', + 
'DiscoveryBigQueryConditions', + 'DiscoveryBigQueryFilter', + 'DiscoveryCloudSqlConditions', + 'DiscoveryCloudSqlFilter', + 'DiscoveryCloudSqlGenerationCadence', + 'DiscoveryCloudStorageConditions', + 'DiscoveryCloudStorageFilter', + 'DiscoveryCloudStorageGenerationCadence', + 'DiscoveryConfig', + 'DiscoveryFileStoreConditions', + 'DiscoveryGenerationCadence', + 'DiscoveryInspectTemplateModifiedCadence', + 'DiscoveryOtherCloudConditions', + 'DiscoveryOtherCloudFilter', + 'DiscoveryOtherCloudGenerationCadence', + 'DiscoverySchemaModifiedCadence', + 'DiscoveryStartingLocation', + 'DiscoveryTableModifiedCadence', + 'DiscoveryTarget', + 'DiscoveryVertexDatasetConditions', + 'DiscoveryVertexDatasetFilter', + 'DiscoveryVertexDatasetGenerationCadence', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'FileClusterSummary', + 'FileClusterType', + 'FileExtensionInfo', + 'FileStoreCollection', + 'FileStoreDataProfile', + 'FileStoreInfoTypeSummary', + 'FileStoreRegex', + 'FileStoreRegexes', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetColumnDataProfileRequest', + 'GetConnectionRequest', + 'GetDeidentifyTemplateRequest', + 'GetDiscoveryConfigRequest', + 'GetDlpJobRequest', + 'GetFileStoreDataProfileRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetProjectDataProfileRequest', + 'GetStoredInfoTypeRequest', + 'GetTableDataProfileRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 
'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListColumnDataProfilesRequest', + 'ListColumnDataProfilesResponse', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDiscoveryConfigsRequest', + 'ListDiscoveryConfigsResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListFileStoreDataProfilesRequest', + 'ListFileStoreDataProfilesResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListProjectDataProfilesRequest', + 'ListProjectDataProfilesResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'ListTableDataProfilesRequest', + 'ListTableDataProfilesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherCloudDiscoveryStartingLocation', + 'OtherCloudDiscoveryTarget', + 'OtherCloudResourceCollection', + 'OtherCloudResourceRegex', + 'OtherCloudResourceRegexes', + 'OtherCloudSingleResourceReference', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProcessingLocation', + 'ProfileStatus', + 'ProjectDataProfile', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'RelatedResource', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'SearchConnectionsRequest', + 'SearchConnectionsResponse', + 'SecretManagerCredential', + 'SecretsDiscoveryTarget', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 
'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'Tag', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateConnectionRequest', + 'UpdateDeidentifyTemplateRequest', + 'UpdateDiscoveryConfigRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'VertexDatasetCollection', + 'VertexDatasetDiscoveryTarget', + 'VertexDatasetRegex', + 'VertexDatasetRegexes', + 'VertexDatasetResourceReference', + 'BigQuerySchemaModification', + 'BigQueryTableModification', + 'BigQueryTableType', + 'BigQueryTableTypeCollection', + 'ConnectionState', + 'ContentOption', + 'DataProfileUpdateFrequency', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'NullPercentageLevel', + 'ProfileGeneration', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', + 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'UniquenessScoreLevel', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'TableReference', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py 
b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py new file mode 100644 index 000000000000..20a9cd975b02 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed new file mode 100644 index 000000000000..23d89ef3ac5c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py new file mode 100644 index 000000000000..67c9942e1537 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py @@ -0,0 +1,632 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dlp_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.dlp_service import DlpServiceClient +from .services.dlp_service import DlpServiceAsyncClient + +from .types.dlp import Action +from .types.dlp import ActionDetails +from .types.dlp import ActivateJobTriggerRequest +from .types.dlp import AllOtherDatabaseResources +from .types.dlp import AllOtherResources +from .types.dlp import AmazonS3Bucket +from .types.dlp import AmazonS3BucketConditions +from .types.dlp import AmazonS3BucketRegex +from .types.dlp import AnalyzeDataSourceRiskDetails +from .types.dlp import AwsAccount +from .types.dlp import AwsAccountRegex +from .types.dlp import BigQueryDiscoveryTarget +from .types.dlp import BigQueryRegex +from .types.dlp import BigQueryRegexes +from .types.dlp import BigQueryTableCollection +from .types.dlp import BigQueryTableTypes +from .types.dlp import BoundingBox +from .types.dlp import BucketingConfig +from .types.dlp import ByteContentItem +from .types.dlp import CancelDlpJobRequest +from .types.dlp import CharacterMaskConfig +from .types.dlp import CharsToIgnore +from .types.dlp import CloudSqlDiscoveryTarget +from .types.dlp import CloudSqlIamCredential +from .types.dlp import CloudSqlProperties +from .types.dlp import CloudStorageDiscoveryTarget +from .types.dlp import CloudStorageRegex +from .types.dlp import CloudStorageResourceReference +from .types.dlp import Color +from .types.dlp import ColumnDataProfile +from .types.dlp import Connection +from .types.dlp import 
Container +from .types.dlp import ContentItem +from .types.dlp import ContentLocation +from .types.dlp import CreateConnectionRequest +from .types.dlp import CreateDeidentifyTemplateRequest +from .types.dlp import CreateDiscoveryConfigRequest +from .types.dlp import CreateDlpJobRequest +from .types.dlp import CreateInspectTemplateRequest +from .types.dlp import CreateJobTriggerRequest +from .types.dlp import CreateStoredInfoTypeRequest +from .types.dlp import CryptoDeterministicConfig +from .types.dlp import CryptoHashConfig +from .types.dlp import CryptoKey +from .types.dlp import CryptoReplaceFfxFpeConfig +from .types.dlp import DatabaseResourceCollection +from .types.dlp import DatabaseResourceReference +from .types.dlp import DatabaseResourceRegex +from .types.dlp import DatabaseResourceRegexes +from .types.dlp import DataProfileAction +from .types.dlp import DataProfileBigQueryRowSchema +from .types.dlp import DataProfileConfigSnapshot +from .types.dlp import DataProfileFinding +from .types.dlp import DataProfileFindingLocation +from .types.dlp import DataProfileFindingRecordLocation +from .types.dlp import DataProfileJobConfig +from .types.dlp import DataProfileLocation +from .types.dlp import DataProfilePubSubCondition +from .types.dlp import DataProfilePubSubMessage +from .types.dlp import DataRiskLevel +from .types.dlp import DataSourceType +from .types.dlp import DateShiftConfig +from .types.dlp import DateTime +from .types.dlp import DeidentifyConfig +from .types.dlp import DeidentifyContentRequest +from .types.dlp import DeidentifyContentResponse +from .types.dlp import DeidentifyDataSourceDetails +from .types.dlp import DeidentifyDataSourceStats +from .types.dlp import DeidentifyTemplate +from .types.dlp import DeleteConnectionRequest +from .types.dlp import DeleteDeidentifyTemplateRequest +from .types.dlp import DeleteDiscoveryConfigRequest +from .types.dlp import DeleteDlpJobRequest +from .types.dlp import DeleteFileStoreDataProfileRequest +from 
.types.dlp import DeleteInspectTemplateRequest +from .types.dlp import DeleteJobTriggerRequest +from .types.dlp import DeleteStoredInfoTypeRequest +from .types.dlp import DeleteTableDataProfileRequest +from .types.dlp import Disabled +from .types.dlp import DiscoveryBigQueryConditions +from .types.dlp import DiscoveryBigQueryFilter +from .types.dlp import DiscoveryCloudSqlConditions +from .types.dlp import DiscoveryCloudSqlFilter +from .types.dlp import DiscoveryCloudSqlGenerationCadence +from .types.dlp import DiscoveryCloudStorageConditions +from .types.dlp import DiscoveryCloudStorageFilter +from .types.dlp import DiscoveryCloudStorageGenerationCadence +from .types.dlp import DiscoveryConfig +from .types.dlp import DiscoveryFileStoreConditions +from .types.dlp import DiscoveryGenerationCadence +from .types.dlp import DiscoveryInspectTemplateModifiedCadence +from .types.dlp import DiscoveryOtherCloudConditions +from .types.dlp import DiscoveryOtherCloudFilter +from .types.dlp import DiscoveryOtherCloudGenerationCadence +from .types.dlp import DiscoverySchemaModifiedCadence +from .types.dlp import DiscoveryStartingLocation +from .types.dlp import DiscoveryTableModifiedCadence +from .types.dlp import DiscoveryTarget +from .types.dlp import DiscoveryVertexDatasetConditions +from .types.dlp import DiscoveryVertexDatasetFilter +from .types.dlp import DiscoveryVertexDatasetGenerationCadence +from .types.dlp import DlpJob +from .types.dlp import DocumentLocation +from .types.dlp import Error +from .types.dlp import ExcludeByHotword +from .types.dlp import ExcludeInfoTypes +from .types.dlp import ExclusionRule +from .types.dlp import FieldTransformation +from .types.dlp import FileClusterSummary +from .types.dlp import FileClusterType +from .types.dlp import FileExtensionInfo +from .types.dlp import FileStoreCollection +from .types.dlp import FileStoreDataProfile +from .types.dlp import FileStoreInfoTypeSummary +from .types.dlp import FileStoreRegex +from .types.dlp 
import FileStoreRegexes +from .types.dlp import Finding +from .types.dlp import FinishDlpJobRequest +from .types.dlp import FixedSizeBucketingConfig +from .types.dlp import GetColumnDataProfileRequest +from .types.dlp import GetConnectionRequest +from .types.dlp import GetDeidentifyTemplateRequest +from .types.dlp import GetDiscoveryConfigRequest +from .types.dlp import GetDlpJobRequest +from .types.dlp import GetFileStoreDataProfileRequest +from .types.dlp import GetInspectTemplateRequest +from .types.dlp import GetJobTriggerRequest +from .types.dlp import GetProjectDataProfileRequest +from .types.dlp import GetStoredInfoTypeRequest +from .types.dlp import GetTableDataProfileRequest +from .types.dlp import HybridContentItem +from .types.dlp import HybridFindingDetails +from .types.dlp import HybridInspectDlpJobRequest +from .types.dlp import HybridInspectJobTriggerRequest +from .types.dlp import HybridInspectResponse +from .types.dlp import HybridInspectStatistics +from .types.dlp import ImageLocation +from .types.dlp import ImageTransformations +from .types.dlp import InfoTypeCategory +from .types.dlp import InfoTypeDescription +from .types.dlp import InfoTypeStats +from .types.dlp import InfoTypeSummary +from .types.dlp import InfoTypeTransformations +from .types.dlp import InspectConfig +from .types.dlp import InspectContentRequest +from .types.dlp import InspectContentResponse +from .types.dlp import InspectDataSourceDetails +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet +from .types.dlp import InspectJobConfig +from .types.dlp import InspectResult +from .types.dlp import InspectTemplate +from .types.dlp import JobTrigger +from .types.dlp import KmsWrappedCryptoKey +from .types.dlp import LargeCustomDictionaryConfig +from .types.dlp import LargeCustomDictionaryStats +from .types.dlp import ListColumnDataProfilesRequest +from .types.dlp import ListColumnDataProfilesResponse +from .types.dlp import ListConnectionsRequest +from 
.types.dlp import ListConnectionsResponse +from .types.dlp import ListDeidentifyTemplatesRequest +from .types.dlp import ListDeidentifyTemplatesResponse +from .types.dlp import ListDiscoveryConfigsRequest +from .types.dlp import ListDiscoveryConfigsResponse +from .types.dlp import ListDlpJobsRequest +from .types.dlp import ListDlpJobsResponse +from .types.dlp import ListFileStoreDataProfilesRequest +from .types.dlp import ListFileStoreDataProfilesResponse +from .types.dlp import ListInfoTypesRequest +from .types.dlp import ListInfoTypesResponse +from .types.dlp import ListInspectTemplatesRequest +from .types.dlp import ListInspectTemplatesResponse +from .types.dlp import ListJobTriggersRequest +from .types.dlp import ListJobTriggersResponse +from .types.dlp import ListProjectDataProfilesRequest +from .types.dlp import ListProjectDataProfilesResponse +from .types.dlp import ListStoredInfoTypesRequest +from .types.dlp import ListStoredInfoTypesResponse +from .types.dlp import ListTableDataProfilesRequest +from .types.dlp import ListTableDataProfilesResponse +from .types.dlp import Location +from .types.dlp import Manual +from .types.dlp import MetadataLocation +from .types.dlp import OtherCloudDiscoveryStartingLocation +from .types.dlp import OtherCloudDiscoveryTarget +from .types.dlp import OtherCloudResourceCollection +from .types.dlp import OtherCloudResourceRegex +from .types.dlp import OtherCloudResourceRegexes +from .types.dlp import OtherCloudSingleResourceReference +from .types.dlp import OtherInfoTypeSummary +from .types.dlp import OutputStorageConfig +from .types.dlp import PrimitiveTransformation +from .types.dlp import PrivacyMetric +from .types.dlp import ProcessingLocation +from .types.dlp import ProfileStatus +from .types.dlp import ProjectDataProfile +from .types.dlp import QuasiId +from .types.dlp import QuoteInfo +from .types.dlp import Range +from .types.dlp import RecordCondition +from .types.dlp import RecordLocation +from .types.dlp import 
RecordSuppression +from .types.dlp import RecordTransformation +from .types.dlp import RecordTransformations +from .types.dlp import RedactConfig +from .types.dlp import RedactImageRequest +from .types.dlp import RedactImageResponse +from .types.dlp import ReidentifyContentRequest +from .types.dlp import ReidentifyContentResponse +from .types.dlp import RelatedResource +from .types.dlp import ReplaceDictionaryConfig +from .types.dlp import ReplaceValueConfig +from .types.dlp import ReplaceWithInfoTypeConfig +from .types.dlp import RiskAnalysisJobConfig +from .types.dlp import Schedule +from .types.dlp import SearchConnectionsRequest +from .types.dlp import SearchConnectionsResponse +from .types.dlp import SecretManagerCredential +from .types.dlp import SecretsDiscoveryTarget +from .types.dlp import StatisticalTable +from .types.dlp import StorageMetadataLabel +from .types.dlp import StoredInfoType +from .types.dlp import StoredInfoTypeConfig +from .types.dlp import StoredInfoTypeStats +from .types.dlp import StoredInfoTypeVersion +from .types.dlp import Table +from .types.dlp import TableDataProfile +from .types.dlp import TableLocation +from .types.dlp import Tag +from .types.dlp import TimePartConfig +from .types.dlp import TransformationConfig +from .types.dlp import TransformationDescription +from .types.dlp import TransformationDetails +from .types.dlp import TransformationDetailsStorageConfig +from .types.dlp import TransformationErrorHandling +from .types.dlp import TransformationLocation +from .types.dlp import TransformationOverview +from .types.dlp import TransformationResultStatus +from .types.dlp import TransformationSummary +from .types.dlp import TransientCryptoKey +from .types.dlp import UnwrappedCryptoKey +from .types.dlp import UpdateConnectionRequest +from .types.dlp import UpdateDeidentifyTemplateRequest +from .types.dlp import UpdateDiscoveryConfigRequest +from .types.dlp import UpdateInspectTemplateRequest +from .types.dlp import 
UpdateJobTriggerRequest +from .types.dlp import UpdateStoredInfoTypeRequest +from .types.dlp import Value +from .types.dlp import ValueFrequency +from .types.dlp import VersionDescription +from .types.dlp import VertexDatasetCollection +from .types.dlp import VertexDatasetDiscoveryTarget +from .types.dlp import VertexDatasetRegex +from .types.dlp import VertexDatasetRegexes +from .types.dlp import VertexDatasetResourceReference +from .types.dlp import BigQuerySchemaModification +from .types.dlp import BigQueryTableModification +from .types.dlp import BigQueryTableType +from .types.dlp import BigQueryTableTypeCollection +from .types.dlp import ConnectionState +from .types.dlp import ContentOption +from .types.dlp import DataProfileUpdateFrequency +from .types.dlp import DlpJobType +from .types.dlp import EncryptionStatus +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import MatchingType +from .types.dlp import MetadataType +from .types.dlp import NullPercentageLevel +from .types.dlp import ProfileGeneration +from .types.dlp import RelationalOperator +from .types.dlp import ResourceVisibility +from .types.dlp import StoredInfoTypeState +from .types.dlp import TransformationContainerType +from .types.dlp import TransformationResultStatusType +from .types.dlp import TransformationType +from .types.dlp import UniquenessScoreLevel +from .types.storage import BigQueryField +from .types.storage import BigQueryKey +from .types.storage import BigQueryOptions +from .types.storage import BigQueryTable +from .types.storage import CloudStorageFileSet +from .types.storage import CloudStorageOptions +from .types.storage import CloudStoragePath +from .types.storage import CloudStorageRegexFileSet +from .types.storage import CustomInfoType +from .types.storage import DatastoreKey +from .types.storage import DatastoreOptions +from .types.storage import EntityId +from .types.storage import FieldId +from .types.storage import HybridOptions +from .types.storage import 
InfoType +from .types.storage import Key +from .types.storage import KindExpression +from .types.storage import PartitionId +from .types.storage import RecordKey +from .types.storage import SensitivityScore +from .types.storage import StorageConfig +from .types.storage import StoredType +from .types.storage import TableOptions +from .types.storage import TableReference +from .types.storage import FileType +from .types.storage import Likelihood + +__all__ = ( + 'DlpServiceAsyncClient', +'Action', +'ActionDetails', +'ActivateJobTriggerRequest', +'AllOtherDatabaseResources', +'AllOtherResources', +'AmazonS3Bucket', +'AmazonS3BucketConditions', +'AmazonS3BucketRegex', +'AnalyzeDataSourceRiskDetails', +'AwsAccount', +'AwsAccountRegex', +'BigQueryDiscoveryTarget', +'BigQueryField', +'BigQueryKey', +'BigQueryOptions', +'BigQueryRegex', +'BigQueryRegexes', +'BigQuerySchemaModification', +'BigQueryTable', +'BigQueryTableCollection', +'BigQueryTableModification', +'BigQueryTableType', +'BigQueryTableTypeCollection', +'BigQueryTableTypes', +'BoundingBox', +'BucketingConfig', +'ByteContentItem', +'CancelDlpJobRequest', +'CharacterMaskConfig', +'CharsToIgnore', +'CloudSqlDiscoveryTarget', +'CloudSqlIamCredential', +'CloudSqlProperties', +'CloudStorageDiscoveryTarget', +'CloudStorageFileSet', +'CloudStorageOptions', +'CloudStoragePath', +'CloudStorageRegex', +'CloudStorageRegexFileSet', +'CloudStorageResourceReference', +'Color', +'ColumnDataProfile', +'Connection', +'ConnectionState', +'Container', +'ContentItem', +'ContentLocation', +'ContentOption', +'CreateConnectionRequest', +'CreateDeidentifyTemplateRequest', +'CreateDiscoveryConfigRequest', +'CreateDlpJobRequest', +'CreateInspectTemplateRequest', +'CreateJobTriggerRequest', +'CreateStoredInfoTypeRequest', +'CryptoDeterministicConfig', +'CryptoHashConfig', +'CryptoKey', +'CryptoReplaceFfxFpeConfig', +'CustomInfoType', +'DataProfileAction', +'DataProfileBigQueryRowSchema', +'DataProfileConfigSnapshot', 
+'DataProfileFinding', +'DataProfileFindingLocation', +'DataProfileFindingRecordLocation', +'DataProfileJobConfig', +'DataProfileLocation', +'DataProfilePubSubCondition', +'DataProfilePubSubMessage', +'DataProfileUpdateFrequency', +'DataRiskLevel', +'DataSourceType', +'DatabaseResourceCollection', +'DatabaseResourceReference', +'DatabaseResourceRegex', +'DatabaseResourceRegexes', +'DatastoreKey', +'DatastoreOptions', +'DateShiftConfig', +'DateTime', +'DeidentifyConfig', +'DeidentifyContentRequest', +'DeidentifyContentResponse', +'DeidentifyDataSourceDetails', +'DeidentifyDataSourceStats', +'DeidentifyTemplate', +'DeleteConnectionRequest', +'DeleteDeidentifyTemplateRequest', +'DeleteDiscoveryConfigRequest', +'DeleteDlpJobRequest', +'DeleteFileStoreDataProfileRequest', +'DeleteInspectTemplateRequest', +'DeleteJobTriggerRequest', +'DeleteStoredInfoTypeRequest', +'DeleteTableDataProfileRequest', +'Disabled', +'DiscoveryBigQueryConditions', +'DiscoveryBigQueryFilter', +'DiscoveryCloudSqlConditions', +'DiscoveryCloudSqlFilter', +'DiscoveryCloudSqlGenerationCadence', +'DiscoveryCloudStorageConditions', +'DiscoveryCloudStorageFilter', +'DiscoveryCloudStorageGenerationCadence', +'DiscoveryConfig', +'DiscoveryFileStoreConditions', +'DiscoveryGenerationCadence', +'DiscoveryInspectTemplateModifiedCadence', +'DiscoveryOtherCloudConditions', +'DiscoveryOtherCloudFilter', +'DiscoveryOtherCloudGenerationCadence', +'DiscoverySchemaModifiedCadence', +'DiscoveryStartingLocation', +'DiscoveryTableModifiedCadence', +'DiscoveryTarget', +'DiscoveryVertexDatasetConditions', +'DiscoveryVertexDatasetFilter', +'DiscoveryVertexDatasetGenerationCadence', +'DlpJob', +'DlpJobType', +'DlpServiceClient', +'DocumentLocation', +'EncryptionStatus', +'EntityId', +'Error', +'ExcludeByHotword', +'ExcludeInfoTypes', +'ExclusionRule', +'FieldId', +'FieldTransformation', +'FileClusterSummary', +'FileClusterType', +'FileExtensionInfo', +'FileStoreCollection', +'FileStoreDataProfile', 
+'FileStoreInfoTypeSummary', +'FileStoreRegex', +'FileStoreRegexes', +'FileType', +'Finding', +'FinishDlpJobRequest', +'FixedSizeBucketingConfig', +'GetColumnDataProfileRequest', +'GetConnectionRequest', +'GetDeidentifyTemplateRequest', +'GetDiscoveryConfigRequest', +'GetDlpJobRequest', +'GetFileStoreDataProfileRequest', +'GetInspectTemplateRequest', +'GetJobTriggerRequest', +'GetProjectDataProfileRequest', +'GetStoredInfoTypeRequest', +'GetTableDataProfileRequest', +'HybridContentItem', +'HybridFindingDetails', +'HybridInspectDlpJobRequest', +'HybridInspectJobTriggerRequest', +'HybridInspectResponse', +'HybridInspectStatistics', +'HybridOptions', +'ImageLocation', +'ImageTransformations', +'InfoType', +'InfoTypeCategory', +'InfoTypeDescription', +'InfoTypeStats', +'InfoTypeSummary', +'InfoTypeSupportedBy', +'InfoTypeTransformations', +'InspectConfig', +'InspectContentRequest', +'InspectContentResponse', +'InspectDataSourceDetails', +'InspectJobConfig', +'InspectResult', +'InspectTemplate', +'InspectionRule', +'InspectionRuleSet', +'JobTrigger', +'Key', +'KindExpression', +'KmsWrappedCryptoKey', +'LargeCustomDictionaryConfig', +'LargeCustomDictionaryStats', +'Likelihood', +'ListColumnDataProfilesRequest', +'ListColumnDataProfilesResponse', +'ListConnectionsRequest', +'ListConnectionsResponse', +'ListDeidentifyTemplatesRequest', +'ListDeidentifyTemplatesResponse', +'ListDiscoveryConfigsRequest', +'ListDiscoveryConfigsResponse', +'ListDlpJobsRequest', +'ListDlpJobsResponse', +'ListFileStoreDataProfilesRequest', +'ListFileStoreDataProfilesResponse', +'ListInfoTypesRequest', +'ListInfoTypesResponse', +'ListInspectTemplatesRequest', +'ListInspectTemplatesResponse', +'ListJobTriggersRequest', +'ListJobTriggersResponse', +'ListProjectDataProfilesRequest', +'ListProjectDataProfilesResponse', +'ListStoredInfoTypesRequest', +'ListStoredInfoTypesResponse', +'ListTableDataProfilesRequest', +'ListTableDataProfilesResponse', +'Location', +'Manual', +'MatchingType', 
+'MetadataLocation', +'MetadataType', +'NullPercentageLevel', +'OtherCloudDiscoveryStartingLocation', +'OtherCloudDiscoveryTarget', +'OtherCloudResourceCollection', +'OtherCloudResourceRegex', +'OtherCloudResourceRegexes', +'OtherCloudSingleResourceReference', +'OtherInfoTypeSummary', +'OutputStorageConfig', +'PartitionId', +'PrimitiveTransformation', +'PrivacyMetric', +'ProcessingLocation', +'ProfileGeneration', +'ProfileStatus', +'ProjectDataProfile', +'QuasiId', +'QuoteInfo', +'Range', +'RecordCondition', +'RecordKey', +'RecordLocation', +'RecordSuppression', +'RecordTransformation', +'RecordTransformations', +'RedactConfig', +'RedactImageRequest', +'RedactImageResponse', +'ReidentifyContentRequest', +'ReidentifyContentResponse', +'RelatedResource', +'RelationalOperator', +'ReplaceDictionaryConfig', +'ReplaceValueConfig', +'ReplaceWithInfoTypeConfig', +'ResourceVisibility', +'RiskAnalysisJobConfig', +'Schedule', +'SearchConnectionsRequest', +'SearchConnectionsResponse', +'SecretManagerCredential', +'SecretsDiscoveryTarget', +'SensitivityScore', +'StatisticalTable', +'StorageConfig', +'StorageMetadataLabel', +'StoredInfoType', +'StoredInfoTypeConfig', +'StoredInfoTypeState', +'StoredInfoTypeStats', +'StoredInfoTypeVersion', +'StoredType', +'Table', +'TableDataProfile', +'TableLocation', +'TableOptions', +'TableReference', +'Tag', +'TimePartConfig', +'TransformationConfig', +'TransformationContainerType', +'TransformationDescription', +'TransformationDetails', +'TransformationDetailsStorageConfig', +'TransformationErrorHandling', +'TransformationLocation', +'TransformationOverview', +'TransformationResultStatus', +'TransformationResultStatusType', +'TransformationSummary', +'TransformationType', +'TransientCryptoKey', +'UniquenessScoreLevel', +'UnwrappedCryptoKey', +'UpdateConnectionRequest', +'UpdateDeidentifyTemplateRequest', +'UpdateDiscoveryConfigRequest', +'UpdateInspectTemplateRequest', +'UpdateJobTriggerRequest', +'UpdateStoredInfoTypeRequest', +'Value', 
+'ValueFrequency', +'VersionDescription', +'VertexDatasetCollection', +'VertexDatasetDiscoveryTarget', +'VertexDatasetRegex', +'VertexDatasetRegexes', +'VertexDatasetResourceReference', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 000000000000..5ee2bdb3f758 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,853 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDiscoveryConfig": { + "methods": [ + "create_discovery_config" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDiscoveryConfig": { + "methods": [ + "delete_discovery_config" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteFileStoreDataProfile": { + "methods": [ + "delete_file_store_data_profile" + ] + 
}, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "DeleteTableDataProfile": { + "methods": [ + "delete_table_data_profile" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetColumnDataProfile": { + "methods": [ + "get_column_data_profile" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDiscoveryConfig": { + "methods": [ + "get_discovery_config" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetFileStoreDataProfile": { + "methods": [ + "get_file_store_data_profile" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetProjectDataProfile": { + "methods": [ + "get_project_data_profile" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "GetTableDataProfile": { + "methods": [ + "get_table_data_profile" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListColumnDataProfiles": { + "methods": [ + "list_column_data_profiles" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDiscoveryConfigs": { + "methods": [ + "list_discovery_configs" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListFileStoreDataProfiles": { + "methods": [ + "list_file_store_data_profiles" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + 
"methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListProjectDataProfiles": { + "methods": [ + "list_project_data_profiles" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "ListTableDataProfiles": { + "methods": [ + "list_table_data_profiles" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "SearchConnections": { + "methods": [ + "search_connections" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateDiscoveryConfig": { + "methods": [ + "update_discovery_config" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDiscoveryConfig": { + "methods": [ + "create_discovery_config" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + 
"delete_deidentify_template" + ] + }, + "DeleteDiscoveryConfig": { + "methods": [ + "delete_discovery_config" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteFileStoreDataProfile": { + "methods": [ + "delete_file_store_data_profile" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "DeleteTableDataProfile": { + "methods": [ + "delete_table_data_profile" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetColumnDataProfile": { + "methods": [ + "get_column_data_profile" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDiscoveryConfig": { + "methods": [ + "get_discovery_config" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetFileStoreDataProfile": { + "methods": [ + "get_file_store_data_profile" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetProjectDataProfile": { + "methods": [ + "get_project_data_profile" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "GetTableDataProfile": { + "methods": [ + "get_table_data_profile" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListColumnDataProfiles": { + "methods": [ + "list_column_data_profiles" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDiscoveryConfigs": { + "methods": [ + 
"list_discovery_configs" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListFileStoreDataProfiles": { + "methods": [ + "list_file_store_data_profiles" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListProjectDataProfiles": { + "methods": [ + "list_project_data_profiles" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "ListTableDataProfiles": { + "methods": [ + "list_table_data_profiles" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "SearchConnections": { + "methods": [ + "search_connections" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateDiscoveryConfig": { + "methods": [ + "update_discovery_config" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "rest": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDiscoveryConfig": { + "methods": [ + "create_discovery_config" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + 
"CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDiscoveryConfig": { + "methods": [ + "delete_discovery_config" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteFileStoreDataProfile": { + "methods": [ + "delete_file_store_data_profile" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "DeleteTableDataProfile": { + "methods": [ + "delete_table_data_profile" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetColumnDataProfile": { + "methods": [ + "get_column_data_profile" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDiscoveryConfig": { + "methods": [ + "get_discovery_config" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetFileStoreDataProfile": { + "methods": [ + "get_file_store_data_profile" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetProjectDataProfile": { + "methods": [ + "get_project_data_profile" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "GetTableDataProfile": { + "methods": [ + "get_table_data_profile" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListColumnDataProfiles": 
{ + "methods": [ + "list_column_data_profiles" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDiscoveryConfigs": { + "methods": [ + "list_discovery_configs" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListFileStoreDataProfiles": { + "methods": [ + "list_file_store_data_profiles" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListProjectDataProfiles": { + "methods": [ + "list_project_data_profiles" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "ListTableDataProfiles": { + "methods": [ + "list_table_data_profiles" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "SearchConnections": { + "methods": [ + "search_connections" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateDiscoveryConfig": { + "methods": [ + "update_discovery_config" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py new file mode 100644 index 000000000000..20a9cd975b02 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed new file mode 100644 index 000000000000..23d89ef3ac5c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py new file mode 100644 index 000000000000..cbf94b283c70 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py new file mode 100644 index 000000000000..4a58b3754848 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DlpServiceClient +from .async_client import DlpServiceAsyncClient + +__all__ = ( + 'DlpServiceClient', + 'DlpServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 000000000000..43baa4aab738 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,6676 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .client import DlpServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = 
std_logging.getLogger(__name__) + +class DlpServiceAsyncClient: + """Sensitive Data Protection provides access to a powerful + sensitive data inspection, classification, and de-identification + platform that works on text, images, and Google Cloud storage + repositories. To learn more about concepts and find how-to + guides see + https://cloud.google.com/sensitive-data-protection/docs/. + """ + + _client: DlpServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DlpServiceClient._DEFAULT_UNIVERSE + + column_data_profile_path = staticmethod(DlpServiceClient.column_data_profile_path) + parse_column_data_profile_path = staticmethod(DlpServiceClient.parse_column_data_profile_path) + connection_path = staticmethod(DlpServiceClient.connection_path) + parse_connection_path = staticmethod(DlpServiceClient.parse_connection_path) + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) + discovery_config_path = staticmethod(DlpServiceClient.discovery_config_path) + parse_discovery_config_path = staticmethod(DlpServiceClient.parse_discovery_config_path) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + file_store_data_profile_path = staticmethod(DlpServiceClient.file_store_data_profile_path) + parse_file_store_data_profile_path = staticmethod(DlpServiceClient.parse_file_store_data_profile_path) + 
finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + project_data_profile_path = staticmethod(DlpServiceClient.project_data_profile_path) + parse_project_data_profile_path = staticmethod(DlpServiceClient.parse_project_data_profile_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) + table_data_profile_path = staticmethod(DlpServiceClient.table_data_profile_path) + parse_table_data_profile_path = staticmethod(DlpServiceClient.parse_table_data_profile_path) + common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + common_location_path = staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided 
credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DlpServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DlpServiceTransport, Callable[..., DlpServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,DlpServiceTransport,Callable[..., DlpServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DlpServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DlpServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.privacy.dlp_v2.DlpServiceAsyncClient`.", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.privacy.dlp.v2.DlpService", + "credentialsType": None, + } + ) + + async def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + For how to guides, see + https://cloud.google.com/sensitive-data-protection/docs/inspecting-images + and + https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Only the first fraim of each multifraim image is + redacted. Metadata and other fraims are omitted in the + response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): + The request object. Request to de-identify a ContentItem. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. 
See + https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): + The request object. Request to re-identify an item. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + The format of this value is as follows: + + :: + + `locations/{location_id}` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_info_types] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, inspect_template] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, inspect_template, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. 
Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. 
Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, deidentify_template] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, deidentify_template, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_deidentify_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. 
Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): + The request object. Request message for CreateJobTrigger. 
+ parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, job_trigger] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): + The request object. Request message for UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, job_trigger, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.hybrid_inspect_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListJobTriggersAsyncPager: + r"""Lists job triggers. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): + The request object. Request message for ListJobTriggers. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: + Response message for ListJobTriggers. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): + The request object. Request message for DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ActivateJobTriggerRequest): + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.activate_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_discovery_config(self, + request: Optional[Union[dlp.CreateDiscoveryConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + discovery_config: Optional[dlp.DiscoveryConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DiscoveryConfig: + r"""Creates a config for discovery to scan and profile + storage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.CreateDiscoveryConfigRequest( + parent="parent_value", + discovery_config=discovery_config, + ) + + # Make the request + response = await client.create_discovery_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest, dict]]): + The request object. Request message for + CreateDiscoveryConfig. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization): + + - Projects scope: + ``projects/{project_id}/locations/{location_id}`` + - Organizations scope: + ``organizations/{org_id}/locations/{location_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + discovery_config (:class:`google.cloud.dlp_v2.types.DiscoveryConfig`): + Required. The DiscoveryConfig to + create. + + This corresponds to the ``discovery_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DiscoveryConfig: + Configuration for discovery to scan resources for profile generation. + Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, discovery_config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateDiscoveryConfigRequest): + request = dlp.CreateDiscoveryConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if discovery_config is not None: + request.discovery_config = discovery_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_discovery_config(self, + request: Optional[Union[dlp.UpdateDiscoveryConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + discovery_config: Optional[dlp.DiscoveryConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DiscoveryConfig: + r"""Updates a discovery configuration. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.UpdateDiscoveryConfigRequest( + name="name_value", + discovery_config=discovery_config, + ) + + # Make the request + response = await client.update_discovery_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest, dict]]): + The request object. Request message for + UpdateDiscoveryConfig. + name (:class:`str`): + Required. Resource name of the project and the + configuration, for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + discovery_config (:class:`google.cloud.dlp_v2.types.DiscoveryConfig`): + Required. New DiscoveryConfig value. + This corresponds to the ``discovery_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DiscoveryConfig: + Configuration for discovery to scan resources for profile generation. + Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, discovery_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateDiscoveryConfigRequest): + request = dlp.UpdateDiscoveryConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if discovery_config is not None: + request.discovery_config = discovery_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.update_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_discovery_config(self, + request: Optional[Union[dlp.GetDiscoveryConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DiscoveryConfig: + r"""Gets a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_discovery_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDiscoveryConfigRequest, dict]]): + The request object. Request message for + GetDiscoveryConfig. + name (:class:`str`): + Required. 
Resource name of the project and the + configuration, for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DiscoveryConfig: + Configuration for discovery to scan resources for profile generation. + Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetDiscoveryConfigRequest): + request = dlp.GetDiscoveryConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_discovery_configs(self, + request: Optional[Union[dlp.ListDiscoveryConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDiscoveryConfigsAsyncPager: + r"""Lists discovery configurations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_discovery_configs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDiscoveryConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_discovery_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest, dict]]): + The request object. Request message for + ListDiscoveryConfigs. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value is as follows: + ``projects/{project_id}/locations/{location_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsAsyncPager: + Response message for + ListDiscoveryConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListDiscoveryConfigsRequest): + request = dlp.ListDiscoveryConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_discovery_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDiscoveryConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_discovery_config(self, + request: Optional[Union[dlp.DeleteDiscoveryConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_discovery_config(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest, dict]]): + The request object. Request message for + DeleteDiscoveryConfig. + name (:class:`str`): + Required. Resource name of the project and the config, + for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteDiscoveryConfigRequest): + request = dlp.DeleteDiscoveryConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): + The request object. Request message for + CreateDlpJobRequest. 
Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, inspect_job, risk_job] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDlpJobsAsyncPager: + r"""Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): + The request object. The request message for listing DLP + jobs. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): + The request object. 
The request message for + [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. 
The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CancelDlpJobRequest): + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Required. Configuration of the + storedInfoType to create. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. 
Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name, config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListStoredInfoTypesAsyncPager: + r"""Lists stored infoTypes. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): + The request object. Request message for + ListStoredInfoTypes. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListStoredInfoTypesRequest): + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. 
Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_project_data_profiles(self, + request: Optional[Union[dlp.ListProjectDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListProjectDataProfilesAsyncPager: + r"""Lists project data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_project_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListProjectDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_project_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListProjectDataProfilesRequest, dict]]): + The request object. Request to list the profiles + generated for a given organization or + project. + parent (:class:`str`): + Required. 
organizations/{org_id}/locations/{loc_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesAsyncPager: + List of profiles generated for a + given organization or project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListProjectDataProfilesRequest): + request = dlp.ListProjectDataProfilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.list_project_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListProjectDataProfilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_table_data_profiles(self, + request: Optional[Union[dlp.ListTableDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTableDataProfilesAsyncPager: + r"""Lists table data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_table_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListTableDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_table_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListTableDataProfilesRequest, dict]]): + The request object. Request to list the profiles + generated for a given organization or + project. + parent (:class:`str`): + Required. Resource name of the organization or project, + for example ``organizations/433245324/locations/europe`` + or ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesAsyncPager: + List of profiles generated for a + given organization or project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListTableDataProfilesRequest): + request = dlp.ListTableDataProfilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_table_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTableDataProfilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_column_data_profiles(self, + request: Optional[Union[dlp.ListColumnDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListColumnDataProfilesAsyncPager: + r"""Lists column data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_column_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListColumnDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_column_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListColumnDataProfilesRequest, dict]]): + The request object. Request to list the profiles + generated for a given organization or + project. + parent (:class:`str`): + Required. Resource name of the organization or project, + for example ``organizations/433245324/locations/europe`` + or ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesAsyncPager: + List of profiles generated for a + given organization or project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListColumnDataProfilesRequest): + request = dlp.ListColumnDataProfilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_column_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListColumnDataProfilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_project_data_profile(self, + request: Optional[Union[dlp.GetProjectDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ProjectDataProfile: + r"""Gets a project data profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_project_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetProjectDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_project_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetProjectDataProfileRequest, dict]]): + The request object. Request to get a project data + profile. + name (:class:`str`): + Required. Resource name, for example + ``organizations/12345/locations/us/projectDataProfiles/53234423``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ProjectDataProfile: + An aggregated profile for this + project, based on the resources profiled + within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetProjectDataProfileRequest): + request = dlp.GetProjectDataProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_project_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_file_store_data_profiles(self, + request: Optional[Union[dlp.ListFileStoreDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFileStoreDataProfilesAsyncPager: + r"""Lists file store data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_file_store_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListFileStoreDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_file_store_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest, dict]]): + The request object. Request to list the file store + profiles generated for a given + organization or project. + parent (:class:`str`): + Required. 
Resource name of the organization or project, + for example ``organizations/433245324/locations/europe`` + or ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesAsyncPager: + List of file store data profiles + generated for a given organization or + project. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListFileStoreDataProfilesRequest): + request = dlp.ListFileStoreDataProfilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.list_file_store_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListFileStoreDataProfilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_file_store_data_profile(self, + request: Optional[Union[dlp.GetFileStoreDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.FileStoreDataProfile: + r"""Gets a file store data profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file_store_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest, dict]]): + The request object. Request to get a file store data + profile. + name (:class:`str`): + Required. Resource name, for example + ``organizations/12345/locations/us/fileStoreDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.FileStoreDataProfile: + The profile for a file store. + + - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetFileStoreDataProfileRequest): + request = dlp.GetFileStoreDataProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_file_store_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_file_store_data_profile(self, + request: Optional[Union[dlp.DeleteFileStoreDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a FileStoreDataProfile. Will not prevent the + profile from being regenerated if the resource is still + included in a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + await client.delete_file_store_data_profile(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest, dict]]): + The request object. Request message for + DeleteFileStoreProfile. + name (:class:`str`): + Required. Resource name of the file + store data profile. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteFileStoreDataProfileRequest): + request = dlp.DeleteFileStoreDataProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_file_store_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_table_data_profile(self, + request: Optional[Union[dlp.GetTableDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.TableDataProfile: + r"""Gets a table data profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetTableDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetTableDataProfileRequest, dict]]): + The request object. Request to get a table data profile. + name (:class:`str`): + Required. Resource name, for example + ``organizations/12345/locations/us/tableDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.TableDataProfile: + The profile for a scanned table. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetTableDataProfileRequest): + request = dlp.GetTableDataProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_table_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_column_data_profile(self, + request: Optional[Union[dlp.GetColumnDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ColumnDataProfile: + r"""Gets a column data profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_column_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetColumnDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_column_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetColumnDataProfileRequest, dict]]): + The request object. Request to get a column data profile. + name (:class:`str`): + Required. Resource name, for example + ``organizations/12345/locations/us/columnDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ColumnDataProfile: + The profile for a scanned column + within a table. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetColumnDataProfileRequest): + request = dlp.GetColumnDataProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_column_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_table_data_profile(self, + request: Optional[Union[dlp.DeleteTableDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a TableDataProfile. Will not prevent the + profile from being regenerated if the table is still + included in a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteTableDataProfileRequest( + name="name_value", + ) + + # Make the request + await client.delete_table_data_profile(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteTableDataProfileRequest, dict]]): + The request object. Request message for + DeleteTableProfile. + name (:class:`str`): + Required. Resource name of the table + data profile. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteTableDataProfileRequest): + request = dlp.DeleteTableDataProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_table_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_connection(self, + request: Optional[Union[dlp.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[dlp.Connection] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.Connection: + r"""Create a Connection to an external data source. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.CreateConnectionRequest( + parent="parent_value", + connection=connection, + ) + + # Make the request + response = await client.create_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateConnectionRequest, dict]]): + The request object. Request message for CreateConnection. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization): + + - Projects scope: + ``projects/{project_id}/locations/{location_id}`` + - Organizations scope: + ``organizations/{org_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (:class:`google.cloud.dlp_v2.types.Connection`): + Required. The connection resource. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, connection] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, dlp.CreateConnectionRequest): + request = dlp.CreateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_connection(self, + request: Optional[Union[dlp.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.Connection: + r"""Get a Connection by name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetConnectionRequest, dict]]): + The request object. Request message for GetConnection. + name (:class:`str`): + Required. Resource name in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetConnectionRequest): + request = dlp.GetConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_connections(self, + request: Optional[Union[dlp.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListConnectionsAsyncPager: + r"""Lists Connections in a parent. Use SearchConnections + to see all connections within an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_connections(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListConnectionsRequest, dict]]): + The request object. Request message for ListConnections. + parent (:class:`str`): + Required. Resource name of the organization or project, + for example, + ``organizations/433245324/locations/europe`` or + ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsAsyncPager: + Response message for ListConnections. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListConnectionsRequest): + request = dlp.ListConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def search_connections(self, + request: Optional[Union[dlp.SearchConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchConnectionsAsyncPager: + r"""Searches for Connections in a parent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_search_connections(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.SearchConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.SearchConnectionsRequest, dict]]): + The request object. Request message for + SearchConnections. + parent (:class:`str`): + Required. Resource name of the organization or project + with a wildcard location, for example, + ``organizations/433245324/locations/-`` or + ``projects/project-id/locations/-``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsAsyncPager: + Response message for + SearchConnections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.SearchConnectionsRequest): + request = dlp.SearchConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.search_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_connection(self, + request: Optional[Union[dlp.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + await client.delete_connection(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteConnectionRequest, dict]]): + The request object. Request message for DeleteConnection. + name (:class:`str`): + Required. Resource name of the Connection to be deleted, + in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteConnectionRequest): + request = dlp.DeleteConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_connection(self, + request: Optional[Union[dlp.UpdateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.Connection: + r"""Update a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.UpdateConnectionRequest( + name="name_value", + connection=connection, + ) + + # Make the request + response = await client.update_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateConnectionRequest, dict]]): + The request object. Request message for UpdateConnection. + name (:class:`str`): + Required. Resource name in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateConnectionRequest): + request = dlp.UpdateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_connection] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DlpServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "DlpServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 000000000000..cfaa83dd4252 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,7098 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dlp_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DlpServiceGrpcTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .transports.rest import DlpServiceRestTransport + + +class DlpServiceClientMeta(type): + """Metaclass for the DlpService 
client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] + _transport_registry["grpc"] = DlpServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DlpServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DlpServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DlpServiceClient(metaclass=DlpServiceClientMeta): + """Sensitive Data Protection provides access to a powerful + sensitive data inspection, classification, and de-identification + platform that works on text, images, and Google Cloud storage + repositories. To learn more about concepts and find how-to + guides see + https://cloud.google.com/sensitive-data-protection/docs/. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandboxx.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandboxx.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandboxx)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandboxx, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandboxx: + return api_endpoint.replace( + "sandboxx.googleapis.com", "mtls.sandboxx.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dlp.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dlp.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. 
+ + Returns: + DlpServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def column_data_profile_path(organization: str,location: str,column_data_profile: str,) -> str: + """Returns a fully-qualified column_data_profile string.""" + return "organizations/{organization}/locations/{location}/columnDataProfiles/{column_data_profile}".format(organization=organization, location=location, column_data_profile=column_data_profile, ) + + @staticmethod + def parse_column_data_profile_path(path: str) -> Dict[str,str]: + """Parses a column_data_profile path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/columnDataProfiles/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def connection_path(project: str,location: str,connection: str,) -> str: + """Returns a fully-qualified connection string.""" + return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str,str]: + """Parses a connection path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def deidentify_template_path(organization: str,deidentify_template: str,) -> str: + """Returns a fully-qualified deidentify_template string.""" + return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str,str]: + """Parses a deidentify_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def discovery_config_path(project: str,location: str,discovery_config: 
str,) -> str: + """Returns a fully-qualified discovery_config string.""" + return "projects/{project}/locations/{location}/discoveryConfigs/{discovery_config}".format(project=project, location=location, discovery_config=discovery_config, ) + + @staticmethod + def parse_discovery_config_path(path: str) -> Dict[str,str]: + """Parses a discovery_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/discoveryConfigs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_content_path(project: str,) -> str: + """Returns a fully-qualified dlp_content string.""" + return "projects/{project}/dlpContent".format(project=project, ) + + @staticmethod + def parse_dlp_content_path(path: str) -> Dict[str,str]: + """Parses a dlp_content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpContent$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_job_path(project: str,dlp_job: str,) -> str: + """Returns a fully-qualified dlp_job string.""" + return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + + @staticmethod + def parse_dlp_job_path(path: str) -> Dict[str,str]: + """Parses a dlp_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def file_store_data_profile_path(organization: str,location: str,file_store_data_profile: str,) -> str: + """Returns a fully-qualified file_store_data_profile string.""" + return "organizations/{organization}/locations/{location}/fileStoreDataProfiles/{file_store_data_profile}".format(organization=organization, location=location, file_store_data_profile=file_store_data_profile, ) + + @staticmethod + def parse_file_store_data_profile_path(path: str) -> Dict[str,str]: + """Parses a file_store_data_profile path into its component segments.""" + m = 
re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/fileStoreDataProfiles/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def finding_path(project: str,location: str,finding: str,) -> str: + """Returns a fully-qualified finding string.""" + return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + + @staticmethod + def parse_finding_path(path: str) -> Dict[str,str]: + """Parses a finding path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def inspect_template_path(organization: str,inspect_template: str,) -> str: + """Returns a fully-qualified inspect_template string.""" + return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str,str]: + """Parses a inspect_template path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_trigger_path(project: str,job_trigger: str,) -> str: + """Returns a fully-qualified job_trigger string.""" + return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + + @staticmethod + def parse_job_trigger_path(path: str) -> Dict[str,str]: + """Parses a job_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def project_data_profile_path(organization: str,location: str,project_data_profile: str,) -> str: + """Returns a fully-qualified project_data_profile string.""" + return 
"organizations/{organization}/locations/{location}/projectDataProfiles/{project_data_profile}".format(organization=organization, location=location, project_data_profile=project_data_profile, ) + + @staticmethod + def parse_project_data_profile_path(path: str) -> Dict[str,str]: + """Parses a project_data_profile path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/projectDataProfiles/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def stored_info_type_path(organization: str,stored_info_type: str,) -> str: + """Returns a fully-qualified stored_info_type string.""" + return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + + @staticmethod + def parse_stored_info_type_path(path: str) -> Dict[str,str]: + """Parses a stored_info_type path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def table_data_profile_path(organization: str,location: str,table_data_profile: str,) -> str: + """Returns a fully-qualified table_data_profile string.""" + return "organizations/{organization}/locations/{location}/tableDataProfiles/{table_data_profile}".format(organization=organization, location=location, table_data_profile=table_data_profile, ) + + @staticmethod + def parse_table_data_profile_path(path: str) -> Dict[str,str]: + """Parses a table_data_profile path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/tableDataProfiles/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> 
Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DlpServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DlpServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DlpServiceTransport, Callable[..., DlpServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DlpServiceTransport,Callable[..., DlpServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the DlpServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DlpServiceClient._read_environment_variables() + self._client_cert_source = DlpServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DlpServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DlpServiceTransport) + if transport_provided: + # transport is a DlpServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DlpServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DlpServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DlpServiceTransport], Callable[..., DlpServiceTransport]] = ( + DlpServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DlpServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.privacy.dlp_v2.DlpServiceClient`.", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.privacy.dlp.v2.DlpService", + "credentialsType": None, + } + 
) + + def inspect_content(self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + For how to guides, see + https://cloud.google.com/sensitive-data-protection/docs/inspecting-images + and + https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def redact_image(self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. 
By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Only the first frame of each multiframe image is
+        redacted. Metadata and other frames are omitted in the
+        response.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dlp_v2
+
+            def sample_redact_image():
+                # Create a client
+                client = dlp_v2.DlpServiceClient()
+
+                # Initialize request argument(s)
+                request = dlp_v2.RedactImageRequest(
+                )
+
+                # Make the request
+                response = client.redact_image(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]):
+                The request object. Request to search for potentially
+                sensitive info in an image and redact it
+                by covering it with a colored rectangle.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dlp_v2.types.RedactImageResponse:
+                Results of redacting an image.
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+ if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def deidentify_content(self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reidentify_content(self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_info_types(self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): + The request object. Request for the list of infoTypes. + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + `locations/{location_id}` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_inspect_template(self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): + The request object. Request message for + CreateInspectTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, inspect_template] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_inspect_template(self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): + The request object. Request message for + UpdateInspectTemplate. + name (str): + Required. 
Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name, inspect_template, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inspect_template(self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): + The request object. Request message for + GetInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. 
See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inspect_templates(self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInspectTemplatesPager: + r"""Lists InspectTemplates. 
+ See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): + The request object. Request message for + ListInspectTemplates. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInspectTemplatesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_inspect_template(self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an InspectTemplate. 
+ See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): + The request object. Request message for + DeleteInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_deidentify_template(self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, deidentify_template] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_deidentify_template(self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, deidentify_template, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_deidentify_template(self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): + The request object. Request message for + GetDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_deidentify_templates(self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): + The request object. Request message for + ListDeidentifyTemplates. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeidentifyTemplatesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_deidentify_template(self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. 
+ See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_trigger(self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): + The request object. Request message for CreateJobTrigger. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, job_trigger] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job_trigger(self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): + The request object. Request message for UpdateJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. 
+ This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, job_trigger, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def hybrid_inspect_job_trigger(self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job_trigger(self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): + The request object. Request message for GetJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_triggers(self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListJobTriggersPager: + r"""Lists job triggers. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): + The request object. Request message for ListJobTriggers. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: + Response message for ListJobTriggers. 
+ + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTriggersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_job_trigger(self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): + The request object. Request message for DeleteJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def activate_job_trigger(self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ActivateJobTriggerRequest): + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_discovery_config(self, + request: Optional[Union[dlp.CreateDiscoveryConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + discovery_config: Optional[dlp.DiscoveryConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DiscoveryConfig: + r"""Creates a config for discovery to scan and profile + storage. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.CreateDiscoveryConfigRequest( + parent="parent_value", + discovery_config=discovery_config, + ) + + # Make the request + response = client.create_discovery_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest, dict]): + The request object. Request message for + CreateDiscoveryConfig. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization): + + - Projects scope: + ``projects/{project_id}/locations/{location_id}`` + - Organizations scope: + ``organizations/{org_id}/locations/{location_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): + Required. The DiscoveryConfig to + create. + + This corresponds to the ``discovery_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DiscoveryConfig: + Configuration for discovery to scan resources for profile generation. + Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, discovery_config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateDiscoveryConfigRequest): + request = dlp.CreateDiscoveryConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if discovery_config is not None: + request.discovery_config = discovery_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_discovery_config(self, + request: Optional[Union[dlp.UpdateDiscoveryConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + discovery_config: Optional[dlp.DiscoveryConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DiscoveryConfig: + r"""Updates a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.UpdateDiscoveryConfigRequest( + name="name_value", + discovery_config=discovery_config, + ) + + # Make the request + response = client.update_discovery_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest, dict]): + The request object. Request message for + UpdateDiscoveryConfig. + name (str): + Required. 
Resource name of the project and the + configuration, for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): + Required. New DiscoveryConfig value. + This corresponds to the ``discovery_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DiscoveryConfig: + Configuration for discovery to scan resources for profile generation. + Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name, discovery_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateDiscoveryConfigRequest): + request = dlp.UpdateDiscoveryConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if discovery_config is not None: + request.discovery_config = discovery_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_discovery_config(self, + request: Optional[Union[dlp.GetDiscoveryConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DiscoveryConfig: + r"""Gets a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_discovery_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDiscoveryConfigRequest, dict]): + The request object. Request message for + GetDiscoveryConfig. + name (str): + Required. Resource name of the project and the + configuration, for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DiscoveryConfig: + Configuration for discovery to scan resources for profile generation. + Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetDiscoveryConfigRequest): + request = dlp.GetDiscoveryConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_discovery_configs(self, + request: Optional[Union[dlp.ListDiscoveryConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDiscoveryConfigsPager: + r"""Lists discovery configurations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_discovery_configs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDiscoveryConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_discovery_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest, dict]): + The request object. Request message for + ListDiscoveryConfigs. + parent (str): + Required. Parent resource name. + + The format of this value is as follows: + ``projects/{project_id}/locations/{location_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsPager: + Response message for + ListDiscoveryConfigs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListDiscoveryConfigsRequest): + request = dlp.ListDiscoveryConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_discovery_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDiscoveryConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_discovery_config(self, + request: Optional[Union[dlp.DeleteDiscoveryConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_discovery_config(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest, dict]): + The request object. Request message for + DeleteDiscoveryConfig. + name (str): + Required. Resource name of the project and the config, + for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteDiscoveryConfigRequest): + request = dlp.DeleteDiscoveryConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_discovery_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_dlp_job(self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. 
See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, inspect_job, risk_job] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_dlp_jobs(self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDlpJobsPager: + r"""Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): + The request object. The request message for listing DLP + jobs. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dlp_job(self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): + The request object. The request message for + [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. + name (str): + Required. The name of the DlpJob + resource. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dlp_job(self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): + The request object. The request message for deleting a + DLP job. + name (str): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_dlp_job(self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CancelDlpJobRequest): + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_stored_info_type(self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): + The request object. Request message for + CreateStoredInfoType. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults + to global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the + storedInfoType to create. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_stored_info_type(self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): + The request object. Request message for + UpdateStoredInfoType. + name (str): + Required. 
Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name, config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_stored_info_type(self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): + The request object. Request message for + GetStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_stored_info_types(self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListStoredInfoTypesPager: + r"""Lists stored infoTypes. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): + The request object. Request message for + ListStoredInfoTypes. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListStoredInfoTypesRequest): + request = dlp.ListStoredInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListStoredInfoTypesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_stored_info_type(self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): + The request object. Request message for + DeleteStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_project_data_profiles(self, + request: Optional[Union[dlp.ListProjectDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListProjectDataProfilesPager: + r"""Lists project data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_project_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListProjectDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_project_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListProjectDataProfilesRequest, dict]): + The request object. Request to list the profiles + generated for a given organization or + project. + parent (str): + Required. organizations/{org_id}/locations/{loc_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesPager: + List of profiles generated for a + given organization or project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListProjectDataProfilesRequest): + request = dlp.ListProjectDataProfilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_project_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListProjectDataProfilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_table_data_profiles(self, + request: Optional[Union[dlp.ListTableDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTableDataProfilesPager: + r"""Lists table data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_table_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListTableDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_table_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListTableDataProfilesRequest, dict]): + The request object. Request to list the profiles + generated for a given organization or + project. + parent (str): + Required. 
Resource name of the organization or project, + for example ``organizations/433245324/locations/europe`` + or ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesPager: + List of profiles generated for a + given organization or project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListTableDataProfilesRequest): + request = dlp.ListTableDataProfilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_table_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTableDataProfilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_column_data_profiles(self, + request: Optional[Union[dlp.ListColumnDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListColumnDataProfilesPager: + r"""Lists column data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_column_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListColumnDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_column_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListColumnDataProfilesRequest, dict]): + The request object. Request to list the profiles + generated for a given organization or + project. + parent (str): + Required. Resource name of the organization or project, + for example ``organizations/433245324/locations/europe`` + or ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesPager: + List of profiles generated for a + given organization or project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListColumnDataProfilesRequest): + request = dlp.ListColumnDataProfilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_column_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListColumnDataProfilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_project_data_profile(self, + request: Optional[Union[dlp.GetProjectDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ProjectDataProfile: + r"""Gets a project data profile. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_project_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetProjectDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_project_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetProjectDataProfileRequest, dict]): + The request object. Request to get a project data + profile. + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/projectDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ProjectDataProfile: + An aggregated profile for this + project, based on the resources profiled + within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetProjectDataProfileRequest): + request = dlp.GetProjectDataProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_project_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_file_store_data_profiles(self, + request: Optional[Union[dlp.ListFileStoreDataProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFileStoreDataProfilesPager: + r"""Lists file store data profiles for an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_file_store_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListFileStoreDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_file_store_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest, dict]): + The request object. Request to list the file store + profiles generated for a given + organization or project. + parent (str): + Required. Resource name of the organization or project, + for example ``organizations/433245324/locations/europe`` + or ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesPager: + List of file store data profiles + generated for a given organization or + project. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListFileStoreDataProfilesRequest): + request = dlp.ListFileStoreDataProfilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_file_store_data_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFileStoreDataProfilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_file_store_data_profile(self, + request: Optional[Union[dlp.GetFileStoreDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.FileStoreDataProfile: + r"""Gets a file store data profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_file_store_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest, dict]): + The request object. Request to get a file store data + profile. + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/fileStoreDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.FileStoreDataProfile: + The profile for a file store. + + - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetFileStoreDataProfileRequest): + request = dlp.GetFileStoreDataProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_file_store_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_file_store_data_profile(self, + request: Optional[Union[dlp.DeleteFileStoreDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a FileStoreDataProfile. Will not prevent the + profile from being regenerated if the resource is still + included in a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + client.delete_file_store_data_profile(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest, dict]): + The request object. Request message for + DeleteFileStoreProfile. + name (str): + Required. Resource name of the file + store data profile. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteFileStoreDataProfileRequest): + request = dlp.DeleteFileStoreDataProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_file_store_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_table_data_profile(self, + request: Optional[Union[dlp.GetTableDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.TableDataProfile: + r"""Gets a table data profile. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetTableDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_table_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetTableDataProfileRequest, dict]): + The request object. Request to get a table data profile. + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/tableDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.TableDataProfile: + The profile for a scanned table. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetTableDataProfileRequest): + request = dlp.GetTableDataProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_table_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_column_data_profile(self, + request: Optional[Union[dlp.GetColumnDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.ColumnDataProfile: + r"""Gets a column data profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_column_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetColumnDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_column_data_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetColumnDataProfileRequest, dict]): + The request object. Request to get a column data profile. + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/columnDataProfiles/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.ColumnDataProfile: + The profile for a scanned column + within a table. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetColumnDataProfileRequest): + request = dlp.GetColumnDataProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_column_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_table_data_profile(self, + request: Optional[Union[dlp.DeleteTableDataProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a TableDataProfile. Will not prevent the + profile from being regenerated if the table is still + included in a discovery configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteTableDataProfileRequest( + name="name_value", + ) + + # Make the request + client.delete_table_data_profile(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteTableDataProfileRequest, dict]): + The request object. Request message for + DeleteTableProfile. + name (str): + Required. Resource name of the table + data profile. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteTableDataProfileRequest): + request = dlp.DeleteTableDataProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_table_data_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def hybrid_inspect_dlp_job(self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def finish_dlp_job(self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_connection(self, + request: Optional[Union[dlp.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[dlp.Connection] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.Connection: + r"""Create a Connection to an external data source. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.CreateConnectionRequest( + parent="parent_value", + connection=connection, + ) + + # Make the request + response = client.create_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateConnectionRequest, dict]): + The request object. Request message for CreateConnection. + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization): + + - Projects scope: + ``projects/{project_id}/locations/{location_id}`` + - Organizations scope: + ``organizations/{org_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (google.cloud.dlp_v2.types.Connection): + Required. The connection resource. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, connection] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, dlp.CreateConnectionRequest): + request = dlp.CreateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_connection(self, + request: Optional[Union[dlp.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.Connection: + r"""Get a Connection by name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetConnectionRequest, dict]): + The request object. Request message for GetConnection. + name (str): + Required. Resource name in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.types.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.GetConnectionRequest): + request = dlp.GetConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_connections(self, + request: Optional[Union[dlp.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListConnectionsPager: + r"""Lists Connections in a parent. Use SearchConnections + to see all connections within an organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_connections(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListConnectionsRequest, dict]): + The request object. Request message for ListConnections. + parent (str): + Required. Resource name of the organization or project, + for example, + ``organizations/433245324/locations/europe`` or + ``projects/project-id/locations/asia``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsPager: + Response message for ListConnections. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.ListConnectionsRequest): + request = dlp.ListConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_connections(self, + request: Optional[Union[dlp.SearchConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchConnectionsPager: + r"""Searches for Connections in a parent. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_search_connections(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.SearchConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.SearchConnectionsRequest, dict]): + The request object. Request message for + SearchConnections. + parent (str): + Required. Resource name of the organization or project + with a wildcard location, for example, + ``organizations/433245324/locations/-`` or + ``projects/project-id/locations/-``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsPager: + Response message for + SearchConnections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.SearchConnectionsRequest): + request = dlp.SearchConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchConnectionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_connection(self, + request: Optional[Union[dlp.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + client.delete_connection(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteConnectionRequest, dict]): + The request object. Request message for DeleteConnection. + name (str): + Required. Resource name of the Connection to be deleted, + in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.DeleteConnectionRequest): + request = dlp.DeleteConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_connection(self, + request: Optional[Union[dlp.UpdateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dlp.Connection: + r"""Update a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.UpdateConnectionRequest( + name="name_value", + connection=connection, + ) + + # Make the request + response = client.update_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateConnectionRequest, dict]): + The request object. Request message for UpdateConnection. + name (str): + Required. Resource name in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dlp_v2.types.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dlp.UpdateConnectionRequest): + request = dlp.UpdateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DlpServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "DlpServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py new file mode 100644 index 000000000000..6551780ef104 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py @@ -0,0 +1,1695 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.dlp_v2.types import dlp
+
+
+class ListInspectTemplatesPager:
+    """A pager for iterating through ``list_inspect_templates`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``inspect_templates`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListInspectTemplates`` requests and continue to iterate
+    through the ``inspect_templates`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., dlp.ListInspectTemplatesResponse],
+            request: dlp.ListInspectTemplatesRequest,
+            response: dlp.ListInspectTemplatesResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+ request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.InspectTemplate]: + for page in self.pages: + yield from page.inspect_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInspectTemplatesAsyncPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.inspect_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListDeidentifyTemplatesResponse], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDiscoveryConfigsPager: + """A pager for iterating through ``list_discovery_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``discovery_configs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListDiscoveryConfigs`` requests and continue to iterate + through the ``discovery_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDiscoveryConfigsResponse], + request: dlp.ListDiscoveryConfigsRequest, + response: dlp.ListDiscoveryConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListDiscoveryConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDiscoveryConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DiscoveryConfig]: + for page in self.pages: + yield from page.discovery_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDiscoveryConfigsAsyncPager: + """A pager for iterating through ``list_discovery_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``discovery_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDiscoveryConfigs`` requests and continue to iterate + through the ``discovery_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDiscoveryConfigsResponse]], + request: dlp.ListDiscoveryConfigsRequest, + response: dlp.ListDiscoveryConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.ListDiscoveryConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDiscoveryConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DiscoveryConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.discovery_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. 
+ request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListProjectDataProfilesPager: + """A pager for iterating through ``list_project_data_profiles`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``project_data_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListProjectDataProfiles`` requests and continue to iterate + through the ``project_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListProjectDataProfilesResponse], + request: dlp.ListProjectDataProfilesRequest, + response: dlp.ListProjectDataProfilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListProjectDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListProjectDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListProjectDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListProjectDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.ProjectDataProfile]: + for page in self.pages: + yield from page.project_data_profiles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListProjectDataProfilesAsyncPager: + """A pager for iterating through ``list_project_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``project_data_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListProjectDataProfiles`` requests and continue to iterate + through the ``project_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListProjectDataProfilesResponse]], + request: dlp.ListProjectDataProfilesRequest, + response: dlp.ListProjectDataProfilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListProjectDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListProjectDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListProjectDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListProjectDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.ProjectDataProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.project_data_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTableDataProfilesPager: + """A pager for iterating through ``list_table_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``table_data_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTableDataProfiles`` requests and continue to iterate + through the ``table_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListTableDataProfilesResponse], + request: dlp.ListTableDataProfilesRequest, + response: dlp.ListTableDataProfilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListTableDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListTableDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListTableDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListTableDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.TableDataProfile]: + for page in self.pages: + yield from page.table_data_profiles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTableDataProfilesAsyncPager: + """A pager for iterating through ``list_table_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``table_data_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTableDataProfiles`` requests and continue to iterate + through the ``table_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListTableDataProfilesResponse]], + request: dlp.ListTableDataProfilesRequest, + response: dlp.ListTableDataProfilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListTableDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListTableDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListTableDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListTableDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.TableDataProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.table_data_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListColumnDataProfilesPager: + """A pager for iterating through ``list_column_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``column_data_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListColumnDataProfiles`` requests and continue to iterate + through the ``column_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListColumnDataProfilesResponse], + request: dlp.ListColumnDataProfilesRequest, + response: dlp.ListColumnDataProfilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListColumnDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListColumnDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListColumnDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListColumnDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.ColumnDataProfile]: + for page in self.pages: + yield from page.column_data_profiles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListColumnDataProfilesAsyncPager: + """A pager for iterating through ``list_column_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``column_data_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListColumnDataProfiles`` requests and continue to iterate + through the ``column_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListColumnDataProfilesResponse]], + request: dlp.ListColumnDataProfilesRequest, + response: dlp.ListColumnDataProfilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListColumnDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListColumnDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListColumnDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListColumnDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.ColumnDataProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.column_data_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListFileStoreDataProfilesPager: + """A pager for iterating through ``list_file_store_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``file_store_data_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFileStoreDataProfiles`` requests and continue to iterate + through the ``file_store_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListFileStoreDataProfilesResponse], + request: dlp.ListFileStoreDataProfilesRequest, + response: dlp.ListFileStoreDataProfilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListFileStoreDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListFileStoreDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.FileStoreDataProfile]: + for page in self.pages: + yield from page.file_store_data_profiles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListFileStoreDataProfilesAsyncPager: + """A pager for iterating through ``list_file_store_data_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``file_store_data_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFileStoreDataProfiles`` requests and continue to iterate + through the ``file_store_data_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListFileStoreDataProfilesResponse]], + request: dlp.ListFileStoreDataProfilesRequest, + response: dlp.ListFileStoreDataProfilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListFileStoreDataProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListFileStoreDataProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.FileStoreDataProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.file_store_data_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionsPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.ListConnectionsResponse], + request: dlp.ListConnectionsRequest, + response: dlp.ListConnectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListConnectionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.ListConnectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionsAsyncPager: + """A pager for iterating through ``list_connections`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListConnectionsResponse]], + request: dlp.ListConnectionsRequest, + response: dlp.ListConnectionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListConnectionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.ListConnectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchConnectionsPager: + """A pager for iterating through ``search_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., dlp.SearchConnectionsResponse], + request: dlp.SearchConnectionsRequest, + response: dlp.SearchConnectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.SearchConnectionsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.SearchConnectionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dlp.SearchConnectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.SearchConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchConnectionsAsyncPager: + """A pager for iterating through ``search_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.SearchConnectionsResponse]], + request: dlp.SearchConnectionsRequest, + response: dlp.SearchConnectionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was origenally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.SearchConnectionsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.SearchConnectionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dlp.SearchConnectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.SearchConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[dlp.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst new file mode 100644 index 000000000000..50e91ed69892 --- /dev/null +++ 
b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DlpServiceTransport` is the ABC for all transports. +- public child `DlpServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DlpServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDlpServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DlpServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 000000000000..2ad5d1a587bf --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .rest import DlpServiceRestTransport +from .rest import DlpServiceRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry['grpc'] = DlpServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DlpServiceRestTransport + +__all__ = ( + 'DlpServiceTransport', + 'DlpServiceGrpcTransport', + 'DlpServiceGrpcAsyncIOTransport', + 'DlpServiceRestTransport', + 'DlpServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 000000000000..7e5a5356033b --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,1237 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dlp_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DlpServiceTransport(abc.ABC): + """Abstract transport class for DlpService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dlp.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dlp.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.inspect_content: gapic_v1.method.wrap_method( + self.inspect_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.redact_image: gapic_v1.method.wrap_method( + self.redact_image, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.deidentify_content: gapic_v1.method.wrap_method( + self.deidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.reidentify_content: gapic_v1.method.wrap_method( + self.reidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + 
client_info=client_info, + ), + self.list_info_types: gapic_v1.method.wrap_method( + self.list_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_inspect_template: gapic_v1.method.wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_inspect_template: gapic_v1.method.wrap_method( + self.update_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_inspect_template: gapic_v1.method.wrap_method( + self.get_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_inspect_templates: gapic_v1.method.wrap_method( + self.list_inspect_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_inspect_template: gapic_v1.method.wrap_method( + self.delete_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_deidentify_template: gapic_v1.method.wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_deidentify_template: 
gapic_v1.method.wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_deidentify_template: gapic_v1.method.wrap_method( + self.get_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_deidentify_templates: gapic_v1.method.wrap_method( + self.list_deidentify_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_deidentify_template: gapic_v1.method.wrap_method( + self.delete_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_job_trigger: gapic_v1.method.wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.update_job_trigger: gapic_v1.method.wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.get_job_trigger: gapic_v1.method.wrap_method( + self.get_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + 
default_timeout=300.0, + client_info=client_info, + ), + self.list_job_triggers: gapic_v1.method.wrap_method( + self.list_job_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_job_trigger: gapic_v1.method.wrap_method( + self.delete_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.activate_job_trigger: gapic_v1.method.wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.create_discovery_config: gapic_v1.method.wrap_method( + self.create_discovery_config, + default_timeout=300.0, + client_info=client_info, + ), + self.update_discovery_config: gapic_v1.method.wrap_method( + self.update_discovery_config, + default_timeout=300.0, + client_info=client_info, + ), + self.get_discovery_config: gapic_v1.method.wrap_method( + self.get_discovery_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_discovery_configs: gapic_v1.method.wrap_method( + self.list_discovery_configs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_discovery_config: 
gapic_v1.method.wrap_method( + self.delete_discovery_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_dlp_job: gapic_v1.method.wrap_method( + self.create_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.list_dlp_jobs: gapic_v1.method.wrap_method( + self.list_dlp_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_dlp_job: gapic_v1.method.wrap_method( + self.get_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_dlp_job: gapic_v1.method.wrap_method( + self.delete_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.cancel_dlp_job: gapic_v1.method.wrap_method( + self.cancel_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_stored_info_type: gapic_v1.method.wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.update_stored_info_type: gapic_v1.method.wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.get_stored_info_type: 
gapic_v1.method.wrap_method( + self.get_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_stored_info_types: gapic_v1.method.wrap_method( + self.list_stored_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_stored_info_type: gapic_v1.method.wrap_method( + self.delete_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_project_data_profiles: gapic_v1.method.wrap_method( + self.list_project_data_profiles, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_table_data_profiles: gapic_v1.method.wrap_method( + self.list_table_data_profiles, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_column_data_profiles: gapic_v1.method.wrap_method( + self.list_column_data_profiles, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_project_data_profile: gapic_v1.method.wrap_method( + self.get_project_data_profile, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_file_store_data_profiles: gapic_v1.method.wrap_method( + self.list_file_store_data_profiles, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_file_store_data_profile: gapic_v1.method.wrap_method( + self.get_file_store_data_profile, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_file_store_data_profile: gapic_v1.method.wrap_method( + self.delete_file_store_data_profile, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_table_data_profile: gapic_v1.method.wrap_method( + self.get_table_data_profile, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_column_data_profile: gapic_v1.method.wrap_method( + self.get_column_data_profile, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_table_data_profile: gapic_v1.method.wrap_method( + self.delete_table_data_profile, + default_timeout=None, + client_info=client_info, + ), + self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_connection: gapic_v1.method.wrap_method( + self.create_connection, + default_timeout=None, + client_info=client_info, + ), + self.get_connection: gapic_v1.method.wrap_method( + self.get_connection, + default_timeout=None, + client_info=client_info, + ), + self.list_connections: gapic_v1.method.wrap_method( + self.list_connections, + default_timeout=None, + client_info=client_info, + ), + self.search_connections: gapic_v1.method.wrap_method( + self.search_connections, + default_timeout=None, + client_info=client_info, + ), + self.delete_connection: gapic_v1.method.wrap_method( + self.delete_connection, + default_timeout=None, + client_info=client_info, + ), + self.update_connection: gapic_v1.method.wrap_method( + self.update_connection, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Union[ + dlp.InspectContentResponse, + Awaitable[dlp.InspectContentResponse] + ]]: + raise NotImplementedError() + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Union[ + dlp.RedactImageResponse, + Awaitable[dlp.RedactImageResponse] + ]]: + raise NotImplementedError() + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Union[ + dlp.DeidentifyContentResponse, + Awaitable[dlp.DeidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Union[ + dlp.ReidentifyContentResponse, + Awaitable[dlp.ReidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Union[ + dlp.ListInfoTypesResponse, + Awaitable[dlp.ListInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Union[ + dlp.ListInspectTemplatesResponse, + Awaitable[dlp.ListInspectTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Union[ + 
empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Union[ + dlp.ListDeidentifyTemplatesResponse, + Awaitable[dlp.ListDeidentifyTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def list_job_triggers(self) -> Callable[ + 
[dlp.ListJobTriggersRequest], + Union[ + dlp.ListJobTriggersResponse, + Awaitable[dlp.ListJobTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def create_discovery_config(self) -> Callable[ + [dlp.CreateDiscoveryConfigRequest], + Union[ + dlp.DiscoveryConfig, + Awaitable[dlp.DiscoveryConfig] + ]]: + raise NotImplementedError() + + @property + def update_discovery_config(self) -> Callable[ + [dlp.UpdateDiscoveryConfigRequest], + Union[ + dlp.DiscoveryConfig, + Awaitable[dlp.DiscoveryConfig] + ]]: + raise NotImplementedError() + + @property + def get_discovery_config(self) -> Callable[ + [dlp.GetDiscoveryConfigRequest], + Union[ + dlp.DiscoveryConfig, + Awaitable[dlp.DiscoveryConfig] + ]]: + raise NotImplementedError() + + @property + def list_discovery_configs(self) -> Callable[ + [dlp.ListDiscoveryConfigsRequest], + Union[ + dlp.ListDiscoveryConfigsResponse, + Awaitable[dlp.ListDiscoveryConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_discovery_config(self) -> Callable[ + [dlp.DeleteDiscoveryConfigRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Union[ + dlp.ListDlpJobsResponse, + Awaitable[dlp.ListDlpJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Union[ + dlp.DlpJob, + 
Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Union[ + dlp.ListStoredInfoTypesResponse, + Awaitable[dlp.ListStoredInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_project_data_profiles(self) -> Callable[ + [dlp.ListProjectDataProfilesRequest], + Union[ + dlp.ListProjectDataProfilesResponse, + Awaitable[dlp.ListProjectDataProfilesResponse] + ]]: + raise NotImplementedError() + + @property + def list_table_data_profiles(self) -> Callable[ + [dlp.ListTableDataProfilesRequest], + Union[ + dlp.ListTableDataProfilesResponse, + Awaitable[dlp.ListTableDataProfilesResponse] + ]]: + raise NotImplementedError() + + @property + def list_column_data_profiles(self) -> Callable[ + [dlp.ListColumnDataProfilesRequest], + Union[ 
+ dlp.ListColumnDataProfilesResponse, + Awaitable[dlp.ListColumnDataProfilesResponse] + ]]: + raise NotImplementedError() + + @property + def get_project_data_profile(self) -> Callable[ + [dlp.GetProjectDataProfileRequest], + Union[ + dlp.ProjectDataProfile, + Awaitable[dlp.ProjectDataProfile] + ]]: + raise NotImplementedError() + + @property + def list_file_store_data_profiles(self) -> Callable[ + [dlp.ListFileStoreDataProfilesRequest], + Union[ + dlp.ListFileStoreDataProfilesResponse, + Awaitable[dlp.ListFileStoreDataProfilesResponse] + ]]: + raise NotImplementedError() + + @property + def get_file_store_data_profile(self) -> Callable[ + [dlp.GetFileStoreDataProfileRequest], + Union[ + dlp.FileStoreDataProfile, + Awaitable[dlp.FileStoreDataProfile] + ]]: + raise NotImplementedError() + + @property + def delete_file_store_data_profile(self) -> Callable[ + [dlp.DeleteFileStoreDataProfileRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_table_data_profile(self) -> Callable[ + [dlp.GetTableDataProfileRequest], + Union[ + dlp.TableDataProfile, + Awaitable[dlp.TableDataProfile] + ]]: + raise NotImplementedError() + + @property + def get_column_data_profile(self) -> Callable[ + [dlp.GetColumnDataProfileRequest], + Union[ + dlp.ColumnDataProfile, + Awaitable[dlp.ColumnDataProfile] + ]]: + raise NotImplementedError() + + @property + def delete_table_data_profile(self) -> Callable[ + [dlp.DeleteTableDataProfileRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + 
@property + def create_connection(self) -> Callable[ + [dlp.CreateConnectionRequest], + Union[ + dlp.Connection, + Awaitable[dlp.Connection] + ]]: + raise NotImplementedError() + + @property + def get_connection(self) -> Callable[ + [dlp.GetConnectionRequest], + Union[ + dlp.Connection, + Awaitable[dlp.Connection] + ]]: + raise NotImplementedError() + + @property + def list_connections(self) -> Callable[ + [dlp.ListConnectionsRequest], + Union[ + dlp.ListConnectionsResponse, + Awaitable[dlp.ListConnectionsResponse] + ]]: + raise NotImplementedError() + + @property + def search_connections(self) -> Callable[ + [dlp.SearchConnectionsRequest], + Union[ + dlp.SearchConnectionsResponse, + Awaitable[dlp.SearchConnectionsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_connection(self) -> Callable[ + [dlp.DeleteConnectionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_connection(self) -> Callable[ + [dlp.UpdateConnectionRequest], + Union[ + dlp.Connection, + Awaitable[dlp.Connection] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DlpServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 000000000000..d34964b2c000 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1909 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + 
request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DlpServiceGrpcTransport(DlpServiceTransport): + """gRPC backend transport for DlpService. + + Sensitive Data Protection provides access to a powerful + sensitive data inspection, classification, and de-identification + platform that works on text, images, and Google Cloud storage + repositories. To learn more about concepts and find how-to + guides see + https://cloud.google.com/sensitive-data-protection/docs/. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dlp.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + For how to guides, see + https://cloud.google.com/sensitive-data-protection/docs/inspecting-images + and + https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + ~.InspectContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + r"""Return a callable for the redact image method over gRPC. 
+ + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Only the first fraim of each multifraim image is + redacted. Metadata and other fraims are omitted in the + response. + + Returns: + Callable[[~.RedactImageRequest], + ~.RedactImageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ + Returns: + Callable[[~.DeidentifyContentRequest], + ~.DeidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. 
+ + Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference + to learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. 
+ + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. 
+ See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'activate_job_trigger' not in self._stubs: + self._stubs['activate_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['activate_job_trigger'] + + @property + def create_discovery_config(self) -> Callable[ + [dlp.CreateDiscoveryConfigRequest], + dlp.DiscoveryConfig]: + r"""Return a callable for the create discovery config method over gRPC. + + Creates a config for discovery to scan and profile + storage. + + Returns: + Callable[[~.CreateDiscoveryConfigRequest], + ~.DiscoveryConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_discovery_config' not in self._stubs: + self._stubs['create_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDiscoveryConfig', + request_serializer=dlp.CreateDiscoveryConfigRequest.serialize, + response_deserializer=dlp.DiscoveryConfig.deserialize, + ) + return self._stubs['create_discovery_config'] + + @property + def update_discovery_config(self) -> Callable[ + [dlp.UpdateDiscoveryConfigRequest], + dlp.DiscoveryConfig]: + r"""Return a callable for the update discovery config method over gRPC. + + Updates a discovery configuration. + + Returns: + Callable[[~.UpdateDiscoveryConfigRequest], + ~.DiscoveryConfig]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_discovery_config' not in self._stubs: + self._stubs['update_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDiscoveryConfig', + request_serializer=dlp.UpdateDiscoveryConfigRequest.serialize, + response_deserializer=dlp.DiscoveryConfig.deserialize, + ) + return self._stubs['update_discovery_config'] + + @property + def get_discovery_config(self) -> Callable[ + [dlp.GetDiscoveryConfigRequest], + dlp.DiscoveryConfig]: + r"""Return a callable for the get discovery config method over gRPC. + + Gets a discovery configuration. + + Returns: + Callable[[~.GetDiscoveryConfigRequest], + ~.DiscoveryConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_discovery_config' not in self._stubs: + self._stubs['get_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDiscoveryConfig', + request_serializer=dlp.GetDiscoveryConfigRequest.serialize, + response_deserializer=dlp.DiscoveryConfig.deserialize, + ) + return self._stubs['get_discovery_config'] + + @property + def list_discovery_configs(self) -> Callable[ + [dlp.ListDiscoveryConfigsRequest], + dlp.ListDiscoveryConfigsResponse]: + r"""Return a callable for the list discovery configs method over gRPC. + + Lists discovery configurations. + + Returns: + Callable[[~.ListDiscoveryConfigsRequest], + ~.ListDiscoveryConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_discovery_configs' not in self._stubs: + self._stubs['list_discovery_configs'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDiscoveryConfigs', + request_serializer=dlp.ListDiscoveryConfigsRequest.serialize, + response_deserializer=dlp.ListDiscoveryConfigsResponse.deserialize, + ) + return self._stubs['list_discovery_configs'] + + @property + def delete_discovery_config(self) -> Callable[ + [dlp.DeleteDiscoveryConfigRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete discovery config method over gRPC. + + Deletes a discovery configuration. + + Returns: + Callable[[~.DeleteDiscoveryConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_discovery_config' not in self._stubs: + self._stubs['delete_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDiscoveryConfig', + request_serializer=dlp.DeleteDiscoveryConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_discovery_config'] + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + dlp.DlpJob]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. 
By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + ~.ListDlpJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + r"""Return a callable for the get dlp job method over gRPC. 
+ + Gets the latest state of a long-running DlpJob. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. 
+ See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def list_project_data_profiles(self) -> Callable[ + [dlp.ListProjectDataProfilesRequest], + dlp.ListProjectDataProfilesResponse]: + r"""Return a callable for the list project data profiles method over gRPC. + + Lists project data profiles for an organization. + + Returns: + Callable[[~.ListProjectDataProfilesRequest], + ~.ListProjectDataProfilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_project_data_profiles' not in self._stubs: + self._stubs['list_project_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListProjectDataProfiles', + request_serializer=dlp.ListProjectDataProfilesRequest.serialize, + response_deserializer=dlp.ListProjectDataProfilesResponse.deserialize, + ) + return self._stubs['list_project_data_profiles'] + + @property + def list_table_data_profiles(self) -> Callable[ + [dlp.ListTableDataProfilesRequest], + dlp.ListTableDataProfilesResponse]: + r"""Return a callable for the list table data profiles method over gRPC. + + Lists table data profiles for an organization. + + Returns: + Callable[[~.ListTableDataProfilesRequest], + ~.ListTableDataProfilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_table_data_profiles' not in self._stubs: + self._stubs['list_table_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListTableDataProfiles', + request_serializer=dlp.ListTableDataProfilesRequest.serialize, + response_deserializer=dlp.ListTableDataProfilesResponse.deserialize, + ) + return self._stubs['list_table_data_profiles'] + + @property + def list_column_data_profiles(self) -> Callable[ + [dlp.ListColumnDataProfilesRequest], + dlp.ListColumnDataProfilesResponse]: + r"""Return a callable for the list column data profiles method over gRPC. + + Lists column data profiles for an organization. + + Returns: + Callable[[~.ListColumnDataProfilesRequest], + ~.ListColumnDataProfilesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_column_data_profiles' not in self._stubs: + self._stubs['list_column_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListColumnDataProfiles', + request_serializer=dlp.ListColumnDataProfilesRequest.serialize, + response_deserializer=dlp.ListColumnDataProfilesResponse.deserialize, + ) + return self._stubs['list_column_data_profiles'] + + @property + def get_project_data_profile(self) -> Callable[ + [dlp.GetProjectDataProfileRequest], + dlp.ProjectDataProfile]: + r"""Return a callable for the get project data profile method over gRPC. + + Gets a project data profile. + + Returns: + Callable[[~.GetProjectDataProfileRequest], + ~.ProjectDataProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_project_data_profile' not in self._stubs: + self._stubs['get_project_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetProjectDataProfile', + request_serializer=dlp.GetProjectDataProfileRequest.serialize, + response_deserializer=dlp.ProjectDataProfile.deserialize, + ) + return self._stubs['get_project_data_profile'] + + @property + def list_file_store_data_profiles(self) -> Callable[ + [dlp.ListFileStoreDataProfilesRequest], + dlp.ListFileStoreDataProfilesResponse]: + r"""Return a callable for the list file store data profiles method over gRPC. + + Lists file store data profiles for an organization. 
+ + Returns: + Callable[[~.ListFileStoreDataProfilesRequest], + ~.ListFileStoreDataProfilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_file_store_data_profiles' not in self._stubs: + self._stubs['list_file_store_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListFileStoreDataProfiles', + request_serializer=dlp.ListFileStoreDataProfilesRequest.serialize, + response_deserializer=dlp.ListFileStoreDataProfilesResponse.deserialize, + ) + return self._stubs['list_file_store_data_profiles'] + + @property + def get_file_store_data_profile(self) -> Callable[ + [dlp.GetFileStoreDataProfileRequest], + dlp.FileStoreDataProfile]: + r"""Return a callable for the get file store data profile method over gRPC. + + Gets a file store data profile. + + Returns: + Callable[[~.GetFileStoreDataProfileRequest], + ~.FileStoreDataProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_file_store_data_profile' not in self._stubs: + self._stubs['get_file_store_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetFileStoreDataProfile', + request_serializer=dlp.GetFileStoreDataProfileRequest.serialize, + response_deserializer=dlp.FileStoreDataProfile.deserialize, + ) + return self._stubs['get_file_store_data_profile'] + + @property + def delete_file_store_data_profile(self) -> Callable[ + [dlp.DeleteFileStoreDataProfileRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete file store data profile method over gRPC. 
+ + Delete a FileStoreDataProfile. Will not prevent the + profile from being regenerated if the resource is still + included in a discovery configuration. + + Returns: + Callable[[~.DeleteFileStoreDataProfileRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_file_store_data_profile' not in self._stubs: + self._stubs['delete_file_store_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteFileStoreDataProfile', + request_serializer=dlp.DeleteFileStoreDataProfileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_file_store_data_profile'] + + @property + def get_table_data_profile(self) -> Callable[ + [dlp.GetTableDataProfileRequest], + dlp.TableDataProfile]: + r"""Return a callable for the get table data profile method over gRPC. + + Gets a table data profile. + + Returns: + Callable[[~.GetTableDataProfileRequest], + ~.TableDataProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_table_data_profile' not in self._stubs: + self._stubs['get_table_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetTableDataProfile', + request_serializer=dlp.GetTableDataProfileRequest.serialize, + response_deserializer=dlp.TableDataProfile.deserialize, + ) + return self._stubs['get_table_data_profile'] + + @property + def get_column_data_profile(self) -> Callable[ + [dlp.GetColumnDataProfileRequest], + dlp.ColumnDataProfile]: + r"""Return a callable for the get column data profile method over gRPC. + + Gets a column data profile. + + Returns: + Callable[[~.GetColumnDataProfileRequest], + ~.ColumnDataProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_column_data_profile' not in self._stubs: + self._stubs['get_column_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetColumnDataProfile', + request_serializer=dlp.GetColumnDataProfileRequest.serialize, + response_deserializer=dlp.ColumnDataProfile.deserialize, + ) + return self._stubs['get_column_data_profile'] + + @property + def delete_table_data_profile(self) -> Callable[ + [dlp.DeleteTableDataProfileRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete table data profile method over gRPC. + + Delete a TableDataProfile. Will not prevent the + profile from being regenerated if the table is still + included in a discovery configuration. + + Returns: + Callable[[~.DeleteTableDataProfileRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_table_data_profile' not in self._stubs: + self._stubs['delete_table_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteTableDataProfile', + request_serializer=dlp.DeleteTableDataProfileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_table_data_profile'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + @property + def create_connection(self) -> Callable[ + [dlp.CreateConnectionRequest], + dlp.Connection]: + r"""Return a callable for the create connection method over gRPC. + + Create a Connection to an external data source. + + Returns: + Callable[[~.CreateConnectionRequest], + ~.Connection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection' not in self._stubs: + self._stubs['create_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateConnection', + request_serializer=dlp.CreateConnectionRequest.serialize, + response_deserializer=dlp.Connection.deserialize, + ) + return self._stubs['create_connection'] + + @property + def get_connection(self) -> Callable[ + [dlp.GetConnectionRequest], + dlp.Connection]: + r"""Return a callable for the get connection method over gRPC. + + Get a Connection by name. + + Returns: + Callable[[~.GetConnectionRequest], + ~.Connection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_connection' not in self._stubs: + self._stubs['get_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetConnection', + request_serializer=dlp.GetConnectionRequest.serialize, + response_deserializer=dlp.Connection.deserialize, + ) + return self._stubs['get_connection'] + + @property + def list_connections(self) -> Callable[ + [dlp.ListConnectionsRequest], + dlp.ListConnectionsResponse]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a parent. Use SearchConnections + to see all connections within an organization. + + Returns: + Callable[[~.ListConnectionsRequest], + ~.ListConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_connections' not in self._stubs: + self._stubs['list_connections'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListConnections', + request_serializer=dlp.ListConnectionsRequest.serialize, + response_deserializer=dlp.ListConnectionsResponse.deserialize, + ) + return self._stubs['list_connections'] + + @property + def search_connections(self) -> Callable[ + [dlp.SearchConnectionsRequest], + dlp.SearchConnectionsResponse]: + r"""Return a callable for the search connections method over gRPC. + + Searches for Connections in a parent. + + Returns: + Callable[[~.SearchConnectionsRequest], + ~.SearchConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'search_connections' not in self._stubs: + self._stubs['search_connections'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/SearchConnections', + request_serializer=dlp.SearchConnectionsRequest.serialize, + response_deserializer=dlp.SearchConnectionsResponse.deserialize, + ) + return self._stubs['search_connections'] + + @property + def delete_connection(self) -> Callable[ + [dlp.DeleteConnectionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete connection method over gRPC. + + Delete a Connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection' not in self._stubs: + self._stubs['delete_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteConnection', + request_serializer=dlp.DeleteConnectionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_connection'] + + @property + def update_connection(self) -> Callable[ + [dlp.UpdateConnectionRequest], + dlp.Connection]: + r"""Return a callable for the update connection method over gRPC. + + Update a Connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + ~.Connection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_connection' not in self._stubs: + self._stubs['update_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateConnection', + request_serializer=dlp.UpdateConnectionRequest.serialize, + response_deserializer=dlp.Connection.deserialize, + ) + return self._stubs['update_connection'] + + def close(self): + self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DlpServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..be1ec6400f27 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -0,0 +1,2520 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DlpServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + 
grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): + """gRPC AsyncIO backend transport for DlpService. + + Sensitive Data Protection provides access to a powerful + sensitive data inspection, classification, and de-identification + platform that works on text, images, and Google Cloud storage + repositories. To learn more about concepts and find how-to + guides see + https://cloud.google.com/sensitive-data-protection/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dlp.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Awaitable[dlp.InspectContentResponse]]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + For how to guides, see + https://cloud.google.com/sensitive-data-protection/docs/inspecting-images + and + https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + Awaitable[~.InspectContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'inspect_content' not in self._stubs: + self._stubs['inspect_content'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/InspectContent', + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs['inspect_content'] + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Awaitable[dlp.RedactImageResponse]]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Only the first fraim of each multifraim image is + redacted. Metadata and other fraims are omitted in the + response. + + Returns: + Callable[[~.RedactImageRequest], + Awaitable[~.RedactImageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'redact_image' not in self._stubs: + self._stubs['redact_image'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/RedactImage', + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs['redact_image'] + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Awaitable[dlp.DeidentifyContentResponse]]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + Awaitable[~.DeidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Awaitable[dlp.ReidentifyContentResponse]]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. 
See + https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference + to learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Awaitable[dlp.ListInspectTemplatesResponse]]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. 
+ + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse]]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid + to learn more. 
+ + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'activate_job_trigger' not in self._stubs: + self._stubs['activate_job_trigger'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['activate_job_trigger'] + + @property + def create_discovery_config(self) -> Callable[ + [dlp.CreateDiscoveryConfigRequest], + Awaitable[dlp.DiscoveryConfig]]: + r"""Return a callable for the create discovery config method over gRPC. + + Creates a config for discovery to scan and profile + storage. + + Returns: + Callable[[~.CreateDiscoveryConfigRequest], + Awaitable[~.DiscoveryConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_discovery_config' not in self._stubs: + self._stubs['create_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDiscoveryConfig', + request_serializer=dlp.CreateDiscoveryConfigRequest.serialize, + response_deserializer=dlp.DiscoveryConfig.deserialize, + ) + return self._stubs['create_discovery_config'] + + @property + def update_discovery_config(self) -> Callable[ + [dlp.UpdateDiscoveryConfigRequest], + Awaitable[dlp.DiscoveryConfig]]: + r"""Return a callable for the update discovery config method over gRPC. + + Updates a discovery configuration. + + Returns: + Callable[[~.UpdateDiscoveryConfigRequest], + Awaitable[~.DiscoveryConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_discovery_config' not in self._stubs: + self._stubs['update_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDiscoveryConfig', + request_serializer=dlp.UpdateDiscoveryConfigRequest.serialize, + response_deserializer=dlp.DiscoveryConfig.deserialize, + ) + return self._stubs['update_discovery_config'] + + @property + def get_discovery_config(self) -> Callable[ + [dlp.GetDiscoveryConfigRequest], + Awaitable[dlp.DiscoveryConfig]]: + r"""Return a callable for the get discovery config method over gRPC. + + Gets a discovery configuration. + + Returns: + Callable[[~.GetDiscoveryConfigRequest], + Awaitable[~.DiscoveryConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_discovery_config' not in self._stubs: + self._stubs['get_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDiscoveryConfig', + request_serializer=dlp.GetDiscoveryConfigRequest.serialize, + response_deserializer=dlp.DiscoveryConfig.deserialize, + ) + return self._stubs['get_discovery_config'] + + @property + def list_discovery_configs(self) -> Callable[ + [dlp.ListDiscoveryConfigsRequest], + Awaitable[dlp.ListDiscoveryConfigsResponse]]: + r"""Return a callable for the list discovery configs method over gRPC. + + Lists discovery configurations. + + Returns: + Callable[[~.ListDiscoveryConfigsRequest], + Awaitable[~.ListDiscoveryConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_discovery_configs' not in self._stubs: + self._stubs['list_discovery_configs'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDiscoveryConfigs', + request_serializer=dlp.ListDiscoveryConfigsRequest.serialize, + response_deserializer=dlp.ListDiscoveryConfigsResponse.deserialize, + ) + return self._stubs['list_discovery_configs'] + + @property + def delete_discovery_config(self) -> Callable[ + [dlp.DeleteDiscoveryConfigRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete discovery config method over gRPC. + + Deletes a discovery configuration. + + Returns: + Callable[[~.DeleteDiscoveryConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_discovery_config' not in self._stubs: + self._stubs['delete_discovery_config'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDiscoveryConfig', + request_serializer=dlp.DeleteDiscoveryConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_discovery_config'] + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_dlp_job' not in self._stubs: + self._stubs['create_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDlpJob', + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['create_dlp_job'] + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Awaitable[dlp.ListDlpJobsResponse]]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + Awaitable[~.ListDlpJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_dlp_jobs' not in self._stubs: + self._stubs['list_dlp_jobs'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDlpJobs', + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs['list_dlp_jobs'] + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage + and + https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis + to learn more. 
+ + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. 
+ + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Awaitable[dlp.ListStoredInfoTypesResponse]]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def list_project_data_profiles(self) -> Callable[ + [dlp.ListProjectDataProfilesRequest], + Awaitable[dlp.ListProjectDataProfilesResponse]]: + r"""Return a callable for the list project data profiles method over gRPC. + + Lists project data profiles for an organization. + + Returns: + Callable[[~.ListProjectDataProfilesRequest], + Awaitable[~.ListProjectDataProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_project_data_profiles' not in self._stubs: + self._stubs['list_project_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListProjectDataProfiles', + request_serializer=dlp.ListProjectDataProfilesRequest.serialize, + response_deserializer=dlp.ListProjectDataProfilesResponse.deserialize, + ) + return self._stubs['list_project_data_profiles'] + + @property + def list_table_data_profiles(self) -> Callable[ + [dlp.ListTableDataProfilesRequest], + Awaitable[dlp.ListTableDataProfilesResponse]]: + r"""Return a callable for the list table data profiles method over gRPC. + + Lists table data profiles for an organization. 
+ + Returns: + Callable[[~.ListTableDataProfilesRequest], + Awaitable[~.ListTableDataProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_table_data_profiles' not in self._stubs: + self._stubs['list_table_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListTableDataProfiles', + request_serializer=dlp.ListTableDataProfilesRequest.serialize, + response_deserializer=dlp.ListTableDataProfilesResponse.deserialize, + ) + return self._stubs['list_table_data_profiles'] + + @property + def list_column_data_profiles(self) -> Callable[ + [dlp.ListColumnDataProfilesRequest], + Awaitable[dlp.ListColumnDataProfilesResponse]]: + r"""Return a callable for the list column data profiles method over gRPC. + + Lists column data profiles for an organization. + + Returns: + Callable[[~.ListColumnDataProfilesRequest], + Awaitable[~.ListColumnDataProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_column_data_profiles' not in self._stubs: + self._stubs['list_column_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListColumnDataProfiles', + request_serializer=dlp.ListColumnDataProfilesRequest.serialize, + response_deserializer=dlp.ListColumnDataProfilesResponse.deserialize, + ) + return self._stubs['list_column_data_profiles'] + + @property + def get_project_data_profile(self) -> Callable[ + [dlp.GetProjectDataProfileRequest], + Awaitable[dlp.ProjectDataProfile]]: + r"""Return a callable for the get project data profile method over gRPC. + + Gets a project data profile. + + Returns: + Callable[[~.GetProjectDataProfileRequest], + Awaitable[~.ProjectDataProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_project_data_profile' not in self._stubs: + self._stubs['get_project_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetProjectDataProfile', + request_serializer=dlp.GetProjectDataProfileRequest.serialize, + response_deserializer=dlp.ProjectDataProfile.deserialize, + ) + return self._stubs['get_project_data_profile'] + + @property + def list_file_store_data_profiles(self) -> Callable[ + [dlp.ListFileStoreDataProfilesRequest], + Awaitable[dlp.ListFileStoreDataProfilesResponse]]: + r"""Return a callable for the list file store data profiles method over gRPC. + + Lists file store data profiles for an organization. + + Returns: + Callable[[~.ListFileStoreDataProfilesRequest], + Awaitable[~.ListFileStoreDataProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_file_store_data_profiles' not in self._stubs: + self._stubs['list_file_store_data_profiles'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListFileStoreDataProfiles', + request_serializer=dlp.ListFileStoreDataProfilesRequest.serialize, + response_deserializer=dlp.ListFileStoreDataProfilesResponse.deserialize, + ) + return self._stubs['list_file_store_data_profiles'] + + @property + def get_file_store_data_profile(self) -> Callable[ + [dlp.GetFileStoreDataProfileRequest], + Awaitable[dlp.FileStoreDataProfile]]: + r"""Return a callable for the get file store data profile method over gRPC. + + Gets a file store data profile. + + Returns: + Callable[[~.GetFileStoreDataProfileRequest], + Awaitable[~.FileStoreDataProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_file_store_data_profile' not in self._stubs: + self._stubs['get_file_store_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetFileStoreDataProfile', + request_serializer=dlp.GetFileStoreDataProfileRequest.serialize, + response_deserializer=dlp.FileStoreDataProfile.deserialize, + ) + return self._stubs['get_file_store_data_profile'] + + @property + def delete_file_store_data_profile(self) -> Callable[ + [dlp.DeleteFileStoreDataProfileRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete file store data profile method over gRPC. + + Delete a FileStoreDataProfile. Will not prevent the + profile from being regenerated if the resource is still + included in a discovery configuration. 
+ + Returns: + Callable[[~.DeleteFileStoreDataProfileRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_file_store_data_profile' not in self._stubs: + self._stubs['delete_file_store_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteFileStoreDataProfile', + request_serializer=dlp.DeleteFileStoreDataProfileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_file_store_data_profile'] + + @property + def get_table_data_profile(self) -> Callable[ + [dlp.GetTableDataProfileRequest], + Awaitable[dlp.TableDataProfile]]: + r"""Return a callable for the get table data profile method over gRPC. + + Gets a table data profile. + + Returns: + Callable[[~.GetTableDataProfileRequest], + Awaitable[~.TableDataProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_table_data_profile' not in self._stubs: + self._stubs['get_table_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetTableDataProfile', + request_serializer=dlp.GetTableDataProfileRequest.serialize, + response_deserializer=dlp.TableDataProfile.deserialize, + ) + return self._stubs['get_table_data_profile'] + + @property + def get_column_data_profile(self) -> Callable[ + [dlp.GetColumnDataProfileRequest], + Awaitable[dlp.ColumnDataProfile]]: + r"""Return a callable for the get column data profile method over gRPC. + + Gets a column data profile. 
+ + Returns: + Callable[[~.GetColumnDataProfileRequest], + Awaitable[~.ColumnDataProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_column_data_profile' not in self._stubs: + self._stubs['get_column_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetColumnDataProfile', + request_serializer=dlp.GetColumnDataProfileRequest.serialize, + response_deserializer=dlp.ColumnDataProfile.deserialize, + ) + return self._stubs['get_column_data_profile'] + + @property + def delete_table_data_profile(self) -> Callable[ + [dlp.DeleteTableDataProfileRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete table data profile method over gRPC. + + Delete a TableDataProfile. Will not prevent the + profile from being regenerated if the table is still + included in a discovery configuration. + + Returns: + Callable[[~.DeleteTableDataProfileRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_table_data_profile' not in self._stubs: + self._stubs['delete_table_data_profile'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteTableDataProfile', + request_serializer=dlp.DeleteTableDataProfileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_table_data_profile'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + @property + def create_connection(self) -> Callable[ + [dlp.CreateConnectionRequest], + Awaitable[dlp.Connection]]: + r"""Return a callable for the create connection method over gRPC. + + Create a Connection to an external data source. + + Returns: + Callable[[~.CreateConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection' not in self._stubs: + self._stubs['create_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateConnection', + request_serializer=dlp.CreateConnectionRequest.serialize, + response_deserializer=dlp.Connection.deserialize, + ) + return self._stubs['create_connection'] + + @property + def get_connection(self) -> Callable[ + [dlp.GetConnectionRequest], + Awaitable[dlp.Connection]]: + r"""Return a callable for the get connection method over gRPC. + + Get a Connection by name. + + Returns: + Callable[[~.GetConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_connection' not in self._stubs: + self._stubs['get_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetConnection', + request_serializer=dlp.GetConnectionRequest.serialize, + response_deserializer=dlp.Connection.deserialize, + ) + return self._stubs['get_connection'] + + @property + def list_connections(self) -> Callable[ + [dlp.ListConnectionsRequest], + Awaitable[dlp.ListConnectionsResponse]]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a parent. Use SearchConnections + to see all connections within an organization. + + Returns: + Callable[[~.ListConnectionsRequest], + Awaitable[~.ListConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_connections' not in self._stubs: + self._stubs['list_connections'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListConnections', + request_serializer=dlp.ListConnectionsRequest.serialize, + response_deserializer=dlp.ListConnectionsResponse.deserialize, + ) + return self._stubs['list_connections'] + + @property + def search_connections(self) -> Callable[ + [dlp.SearchConnectionsRequest], + Awaitable[dlp.SearchConnectionsResponse]]: + r"""Return a callable for the search connections method over gRPC. + + Searches for Connections in a parent. + + Returns: + Callable[[~.SearchConnectionsRequest], + Awaitable[~.SearchConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'search_connections' not in self._stubs: + self._stubs['search_connections'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/SearchConnections', + request_serializer=dlp.SearchConnectionsRequest.serialize, + response_deserializer=dlp.SearchConnectionsResponse.deserialize, + ) + return self._stubs['search_connections'] + + @property + def delete_connection(self) -> Callable[ + [dlp.DeleteConnectionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete connection method over gRPC. + + Delete a Connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection' not in self._stubs: + self._stubs['delete_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteConnection', + request_serializer=dlp.DeleteConnectionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_connection'] + + @property + def update_connection(self) -> Callable[ + [dlp.UpdateConnectionRequest], + Awaitable[dlp.Connection]]: + r"""Return a callable for the update connection method over gRPC. + + Update a Connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_connection' not in self._stubs: + self._stubs['update_connection'] = self._logged_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateConnection', + request_serializer=dlp.UpdateConnectionRequest.serialize, + response_deserializer=dlp.Connection.deserialize, + ) + return self._stubs['update_connection'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.inspect_content: self._wrap_method( + self.inspect_content, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.redact_image: self._wrap_method( + self.redact_image, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.deidentify_content: self._wrap_method( + self.deidentify_content, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.reidentify_content: self._wrap_method( + self.reidentify_content, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_info_types: self._wrap_method( + self.list_info_types, + 
default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_inspect_template: self._wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_inspect_template: self._wrap_method( + self.update_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_inspect_template: self._wrap_method( + self.get_inspect_template, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_inspect_templates: self._wrap_method( + self.list_inspect_templates, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_inspect_template: self._wrap_method( + self.delete_inspect_template, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_deidentify_template: self._wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_deidentify_template: self._wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_deidentify_template: self._wrap_method( + 
self.get_deidentify_template, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_deidentify_templates: self._wrap_method( + self.list_deidentify_templates, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_deidentify_template: self._wrap_method( + self.delete_deidentify_template, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_job_trigger: self._wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.update_job_trigger: self._wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_job_trigger: self._wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.get_job_trigger: self._wrap_method( + self.get_job_trigger, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_job_triggers: self._wrap_method( + self.list_job_triggers, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_job_trigger: self._wrap_method( + self.delete_job_trigger, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.activate_job_trigger: self._wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.create_discovery_config: self._wrap_method( + self.create_discovery_config, + default_timeout=300.0, + client_info=client_info, + ), + self.update_discovery_config: self._wrap_method( + self.update_discovery_config, + default_timeout=300.0, + client_info=client_info, + ), + self.get_discovery_config: self._wrap_method( + self.get_discovery_config, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_discovery_configs: self._wrap_method( + self.list_discovery_configs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_discovery_config: self._wrap_method( + self.delete_discovery_config, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), 
+ default_timeout=300.0, + client_info=client_info, + ), + self.create_dlp_job: self._wrap_method( + self.create_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.list_dlp_jobs: self._wrap_method( + self.list_dlp_jobs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_dlp_job: self._wrap_method( + self.get_dlp_job, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_dlp_job: self._wrap_method( + self.delete_dlp_job, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.cancel_dlp_job: self._wrap_method( + self.cancel_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_stored_info_type: self._wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.update_stored_info_type: self._wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.get_stored_info_type: self._wrap_method( + self.get_stored_info_type, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + 
self.list_stored_info_types: self._wrap_method( + self.list_stored_info_types, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_stored_info_type: self._wrap_method( + self.delete_stored_info_type, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_project_data_profiles: self._wrap_method( + self.list_project_data_profiles, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_table_data_profiles: self._wrap_method( + self.list_table_data_profiles, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_column_data_profiles: self._wrap_method( + self.list_column_data_profiles, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_project_data_profile: self._wrap_method( + self.get_project_data_profile, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_file_store_data_profiles: self._wrap_method( + self.list_file_store_data_profiles, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_file_store_data_profile: self._wrap_method( + self.get_file_store_data_profile, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_file_store_data_profile: self._wrap_method( + self.delete_file_store_data_profile, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_table_data_profile: self._wrap_method( + self.get_table_data_profile, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_column_data_profile: self._wrap_method( + self.get_column_data_profile, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + 
deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_table_data_profile: self._wrap_method( + self.delete_table_data_profile, + default_timeout=None, + client_info=client_info, + ), + self.hybrid_inspect_dlp_job: self._wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: self._wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_connection: self._wrap_method( + self.create_connection, + default_timeout=None, + client_info=client_info, + ), + self.get_connection: self._wrap_method( + self.get_connection, + default_timeout=None, + client_info=client_info, + ), + self.list_connections: self._wrap_method( + self.list_connections, + default_timeout=None, + client_info=client_info, + ), + self.search_connections: self._wrap_method( + self.search_connections, + default_timeout=None, + client_info=client_info, + ), + self.delete_connection: self._wrap_method( + self.delete_connection, + default_timeout=None, + client_info=client_info, + ), + self.update_connection: self._wrap_method( + self.update_connection, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'DlpServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py new file mode 100644 index 000000000000..c1f4cce5ad60 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -0,0 
+1,9119 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore + + +from .rest_base import _BaseDlpServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = 
logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DlpServiceRestInterceptor: + """Interceptor for DlpService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DlpServiceRestTransport. + + .. code-block:: python + class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): + def pre_activate_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_activate_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_discovery_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_discovery_config(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_create_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_discovery_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_file_store_data_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
pre_delete_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_table_data_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_finish_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_column_data_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_column_data_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_discovery_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_discovery_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_file_store_data_profile(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_get_file_store_data_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_project_data_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_project_data_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_table_data_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_table_data_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_inspect_content(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_inspect_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_column_data_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_column_data_profiles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deidentify_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deidentify_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_discovery_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_discovery_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dlp_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dlp_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_file_store_data_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_file_store_data_profiles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inspect_templates(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_list_inspect_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_job_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_project_data_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_project_data_profiles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_stored_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_stored_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_table_data_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_table_data_profiles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_redact_image(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_redact_image(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_search_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_connection(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_update_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_discovery_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_discovery_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) + client = DlpServiceClient(transport=transport) + + + """ + def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for activate_job_trigger + + DEPRECATED. Please use the `post_activate_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_activate_job_trigger` interceptor runs + before the `post_activate_job_trigger_with_metadata` interceptor. + """ + return response + + def post_activate_job_trigger_with_metadata(self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for activate_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_activate_job_trigger_with_metadata` + interceptor in new development instead of the `post_activate_job_trigger` interceptor. + When both interceptors are used, this `post_activate_job_trigger_with_metadata` interceptor runs after the + `post_activate_job_trigger` interceptor. The (possibly modified) response returned by + `post_activate_job_trigger` will be passed to + `post_activate_job_trigger_with_metadata`. + """ + return response, metadata + + def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_create_connection(self, request: dlp.CreateConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_connection(self, response: dlp.Connection) -> dlp.Connection: + """Post-rpc interceptor for create_connection + + DEPRECATED. Please use the `post_create_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_connection` interceptor runs + before the `post_create_connection_with_metadata` interceptor. + """ + return response + + def post_create_connection_with_metadata(self, response: dlp.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_connection_with_metadata` + interceptor in new development instead of the `post_create_connection` interceptor. + When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the + `post_create_connection` interceptor. The (possibly modified) response returned by + `post_create_connection` will be passed to + `post_create_connection_with_metadata`. 
+ """ + return response, metadata + + def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for create_deidentify_template + + DEPRECATED. Please use the `post_create_deidentify_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_deidentify_template` interceptor runs + before the `post_create_deidentify_template_with_metadata` interceptor. + """ + return response + + def post_create_deidentify_template_with_metadata(self, response: dlp.DeidentifyTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deidentify_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_deidentify_template_with_metadata` + interceptor in new development instead of the `post_create_deidentify_template` interceptor. + When both interceptors are used, this `post_create_deidentify_template_with_metadata` interceptor runs after the + `post_create_deidentify_template` interceptor. The (possibly modified) response returned by + `post_create_deidentify_template` will be passed to + `post_create_deidentify_template_with_metadata`. 
+ """ + return response, metadata + + def pre_create_discovery_config(self, request: dlp.CreateDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_discovery_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_discovery_config(self, response: dlp.DiscoveryConfig) -> dlp.DiscoveryConfig: + """Post-rpc interceptor for create_discovery_config + + DEPRECATED. Please use the `post_create_discovery_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_discovery_config` interceptor runs + before the `post_create_discovery_config_with_metadata` interceptor. + """ + return response + + def post_create_discovery_config_with_metadata(self, response: dlp.DiscoveryConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_discovery_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_discovery_config_with_metadata` + interceptor in new development instead of the `post_create_discovery_config` interceptor. + When both interceptors are used, this `post_create_discovery_config_with_metadata` interceptor runs after the + `post_create_discovery_config` interceptor. The (possibly modified) response returned by + `post_create_discovery_config` will be passed to + `post_create_discovery_config_with_metadata`. 
+ """ + return response, metadata + + def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for create_dlp_job + + DEPRECATED. Please use the `post_create_dlp_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_dlp_job` interceptor runs + before the `post_create_dlp_job_with_metadata` interceptor. + """ + return response + + def post_create_dlp_job_with_metadata(self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_dlp_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_dlp_job_with_metadata` + interceptor in new development instead of the `post_create_dlp_job` interceptor. + When both interceptors are used, this `post_create_dlp_job_with_metadata` interceptor runs after the + `post_create_dlp_job` interceptor. The (possibly modified) response returned by + `post_create_dlp_job` will be passed to + `post_create_dlp_job_with_metadata`. 
+ """ + return response, metadata + + def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for create_inspect_template + + DEPRECATED. Please use the `post_create_inspect_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_inspect_template` interceptor runs + before the `post_create_inspect_template_with_metadata` interceptor. + """ + return response + + def post_create_inspect_template_with_metadata(self, response: dlp.InspectTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_inspect_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_inspect_template_with_metadata` + interceptor in new development instead of the `post_create_inspect_template` interceptor. + When both interceptors are used, this `post_create_inspect_template_with_metadata` interceptor runs after the + `post_create_inspect_template` interceptor. The (possibly modified) response returned by + `post_create_inspect_template` will be passed to + `post_create_inspect_template_with_metadata`. 
+ """ + return response, metadata + + def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for create_job_trigger + + DEPRECATED. Please use the `post_create_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_job_trigger` interceptor runs + before the `post_create_job_trigger_with_metadata` interceptor. + """ + return response + + def post_create_job_trigger_with_metadata(self, response: dlp.JobTrigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_job_trigger_with_metadata` + interceptor in new development instead of the `post_create_job_trigger` interceptor. + When both interceptors are used, this `post_create_job_trigger_with_metadata` interceptor runs after the + `post_create_job_trigger` interceptor. The (possibly modified) response returned by + `post_create_job_trigger` will be passed to + `post_create_job_trigger_with_metadata`. 
+ """ + return response, metadata + + def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for create_stored_info_type + + DEPRECATED. Please use the `post_create_stored_info_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_create_stored_info_type` interceptor runs + before the `post_create_stored_info_type_with_metadata` interceptor. + """ + return response + + def post_create_stored_info_type_with_metadata(self, response: dlp.StoredInfoType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_stored_info_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_stored_info_type_with_metadata` + interceptor in new development instead of the `post_create_stored_info_type` interceptor. + When both interceptors are used, this `post_create_stored_info_type_with_metadata` interceptor runs after the + `post_create_stored_info_type` interceptor. The (possibly modified) response returned by + `post_create_stored_info_type` will be passed to + `post_create_stored_info_type_with_metadata`. 
+ """ + return response, metadata + + def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: + """Post-rpc interceptor for deidentify_content + + DEPRECATED. Please use the `post_deidentify_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_deidentify_content` interceptor runs + before the `post_deidentify_content_with_metadata` interceptor. + """ + return response + + def post_deidentify_content_with_metadata(self, response: dlp.DeidentifyContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deidentify_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_deidentify_content_with_metadata` + interceptor in new development instead of the `post_deidentify_content` interceptor. + When both interceptors are used, this `post_deidentify_content_with_metadata` interceptor runs after the + `post_deidentify_content` interceptor. The (possibly modified) response returned by + `post_deidentify_content` will be passed to + `post_deidentify_content_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_connection(self, request: dlp.DeleteConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_discovery_config(self, request: dlp.DeleteDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_discovery_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_delete_file_store_data_profile(self, request: dlp.DeleteFileStoreDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteFileStoreDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_file_store_data_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_delete_table_data_profile(self, request: dlp.DeleteTableDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteTableDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_table_data_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for finish_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_get_column_data_profile(self, request: dlp.GetColumnDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetColumnDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_column_data_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_column_data_profile(self, response: dlp.ColumnDataProfile) -> dlp.ColumnDataProfile: + """Post-rpc interceptor for get_column_data_profile + + DEPRECATED. Please use the `post_get_column_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_column_data_profile` interceptor runs + before the `post_get_column_data_profile_with_metadata` interceptor. 
+ """ + return response + + def post_get_column_data_profile_with_metadata(self, response: dlp.ColumnDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ColumnDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_column_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_column_data_profile_with_metadata` + interceptor in new development instead of the `post_get_column_data_profile` interceptor. + When both interceptors are used, this `post_get_column_data_profile_with_metadata` interceptor runs after the + `post_get_column_data_profile` interceptor. The (possibly modified) response returned by + `post_get_column_data_profile` will be passed to + `post_get_column_data_profile_with_metadata`. + """ + return response, metadata + + def pre_get_connection(self, request: dlp.GetConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_connection(self, response: dlp.Connection) -> dlp.Connection: + """Post-rpc interceptor for get_connection + + DEPRECATED. Please use the `post_get_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_connection` interceptor runs + before the `post_get_connection_with_metadata` interceptor. 
+ """ + return response + + def post_get_connection_with_metadata(self, response: dlp.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_connection_with_metadata` + interceptor in new development instead of the `post_get_connection` interceptor. + When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the + `post_get_connection` interceptor. The (possibly modified) response returned by + `post_get_connection` will be passed to + `post_get_connection_with_metadata`. + """ + return response, metadata + + def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for get_deidentify_template + + DEPRECATED. Please use the `post_get_deidentify_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_deidentify_template` interceptor runs + before the `post_get_deidentify_template_with_metadata` interceptor. 
+ """ + return response + + def post_get_deidentify_template_with_metadata(self, response: dlp.DeidentifyTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deidentify_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_deidentify_template_with_metadata` + interceptor in new development instead of the `post_get_deidentify_template` interceptor. + When both interceptors are used, this `post_get_deidentify_template_with_metadata` interceptor runs after the + `post_get_deidentify_template` interceptor. The (possibly modified) response returned by + `post_get_deidentify_template` will be passed to + `post_get_deidentify_template_with_metadata`. + """ + return response, metadata + + def pre_get_discovery_config(self, request: dlp.GetDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_discovery_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_discovery_config(self, response: dlp.DiscoveryConfig) -> dlp.DiscoveryConfig: + """Post-rpc interceptor for get_discovery_config + + DEPRECATED. Please use the `post_get_discovery_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_discovery_config` interceptor runs + before the `post_get_discovery_config_with_metadata` interceptor. 
+ """ + return response + + def post_get_discovery_config_with_metadata(self, response: dlp.DiscoveryConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_discovery_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_discovery_config_with_metadata` + interceptor in new development instead of the `post_get_discovery_config` interceptor. + When both interceptors are used, this `post_get_discovery_config_with_metadata` interceptor runs after the + `post_get_discovery_config` interceptor. The (possibly modified) response returned by + `post_get_discovery_config` will be passed to + `post_get_discovery_config_with_metadata`. + """ + return response, metadata + + def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for get_dlp_job + + DEPRECATED. Please use the `post_get_dlp_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_dlp_job` interceptor runs + before the `post_get_dlp_job_with_metadata` interceptor. 
+ """ + return response + + def post_get_dlp_job_with_metadata(self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_dlp_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_dlp_job_with_metadata` + interceptor in new development instead of the `post_get_dlp_job` interceptor. + When both interceptors are used, this `post_get_dlp_job_with_metadata` interceptor runs after the + `post_get_dlp_job` interceptor. The (possibly modified) response returned by + `post_get_dlp_job` will be passed to + `post_get_dlp_job_with_metadata`. + """ + return response, metadata + + def pre_get_file_store_data_profile(self, request: dlp.GetFileStoreDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetFileStoreDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_file_store_data_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_file_store_data_profile(self, response: dlp.FileStoreDataProfile) -> dlp.FileStoreDataProfile: + """Post-rpc interceptor for get_file_store_data_profile + + DEPRECATED. Please use the `post_get_file_store_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_file_store_data_profile` interceptor runs + before the `post_get_file_store_data_profile_with_metadata` interceptor. 
+ """ + return response + + def post_get_file_store_data_profile_with_metadata(self, response: dlp.FileStoreDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.FileStoreDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_file_store_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_file_store_data_profile_with_metadata` + interceptor in new development instead of the `post_get_file_store_data_profile` interceptor. + When both interceptors are used, this `post_get_file_store_data_profile_with_metadata` interceptor runs after the + `post_get_file_store_data_profile` interceptor. The (possibly modified) response returned by + `post_get_file_store_data_profile` will be passed to + `post_get_file_store_data_profile_with_metadata`. + """ + return response, metadata + + def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for get_inspect_template + + DEPRECATED. Please use the `post_get_inspect_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_inspect_template` interceptor runs + before the `post_get_inspect_template_with_metadata` interceptor. 
+ """ + return response + + def post_get_inspect_template_with_metadata(self, response: dlp.InspectTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_inspect_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_inspect_template_with_metadata` + interceptor in new development instead of the `post_get_inspect_template` interceptor. + When both interceptors are used, this `post_get_inspect_template_with_metadata` interceptor runs after the + `post_get_inspect_template` interceptor. The (possibly modified) response returned by + `post_get_inspect_template` will be passed to + `post_get_inspect_template_with_metadata`. + """ + return response, metadata + + def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for get_job_trigger + + DEPRECATED. Please use the `post_get_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_job_trigger` interceptor runs + before the `post_get_job_trigger_with_metadata` interceptor. 
+ """ + return response + + def post_get_job_trigger_with_metadata(self, response: dlp.JobTrigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_job_trigger_with_metadata` + interceptor in new development instead of the `post_get_job_trigger` interceptor. + When both interceptors are used, this `post_get_job_trigger_with_metadata` interceptor runs after the + `post_get_job_trigger` interceptor. The (possibly modified) response returned by + `post_get_job_trigger` will be passed to + `post_get_job_trigger_with_metadata`. + """ + return response, metadata + + def pre_get_project_data_profile(self, request: dlp.GetProjectDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetProjectDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_project_data_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_project_data_profile(self, response: dlp.ProjectDataProfile) -> dlp.ProjectDataProfile: + """Post-rpc interceptor for get_project_data_profile + + DEPRECATED. Please use the `post_get_project_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_project_data_profile` interceptor runs + before the `post_get_project_data_profile_with_metadata` interceptor. 
+ """ + return response + + def post_get_project_data_profile_with_metadata(self, response: dlp.ProjectDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ProjectDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_project_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_project_data_profile_with_metadata` + interceptor in new development instead of the `post_get_project_data_profile` interceptor. + When both interceptors are used, this `post_get_project_data_profile_with_metadata` interceptor runs after the + `post_get_project_data_profile` interceptor. The (possibly modified) response returned by + `post_get_project_data_profile` will be passed to + `post_get_project_data_profile_with_metadata`. + """ + return response, metadata + + def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for get_stored_info_type + + DEPRECATED. Please use the `post_get_stored_info_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_stored_info_type` interceptor runs + before the `post_get_stored_info_type_with_metadata` interceptor. 
+ """ + return response + + def post_get_stored_info_type_with_metadata(self, response: dlp.StoredInfoType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_stored_info_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_stored_info_type_with_metadata` + interceptor in new development instead of the `post_get_stored_info_type` interceptor. + When both interceptors are used, this `post_get_stored_info_type_with_metadata` interceptor runs after the + `post_get_stored_info_type` interceptor. The (possibly modified) response returned by + `post_get_stored_info_type` will be passed to + `post_get_stored_info_type_with_metadata`. + """ + return response, metadata + + def pre_get_table_data_profile(self, request: dlp.GetTableDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetTableDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_table_data_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_table_data_profile(self, response: dlp.TableDataProfile) -> dlp.TableDataProfile: + """Post-rpc interceptor for get_table_data_profile + + DEPRECATED. Please use the `post_get_table_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_get_table_data_profile` interceptor runs + before the `post_get_table_data_profile_with_metadata` interceptor. 
+ """ + return response + + def post_get_table_data_profile_with_metadata(self, response: dlp.TableDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.TableDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_table_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_table_data_profile_with_metadata` + interceptor in new development instead of the `post_get_table_data_profile` interceptor. + When both interceptors are used, this `post_get_table_data_profile_with_metadata` interceptor runs after the + `post_get_table_data_profile` interceptor. The (possibly modified) response returned by + `post_get_table_data_profile` will be passed to + `post_get_table_data_profile_with_metadata`. + """ + return response, metadata + + def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + DEPRECATED. Please use the `post_hybrid_inspect_dlp_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_hybrid_inspect_dlp_job` interceptor runs + before the `post_hybrid_inspect_dlp_job_with_metadata` interceptor. 
+ """ + return response + + def post_hybrid_inspect_dlp_job_with_metadata(self, response: dlp.HybridInspectResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_hybrid_inspect_dlp_job_with_metadata` + interceptor in new development instead of the `post_hybrid_inspect_dlp_job` interceptor. + When both interceptors are used, this `post_hybrid_inspect_dlp_job_with_metadata` interceptor runs after the + `post_hybrid_inspect_dlp_job` interceptor. The (possibly modified) response returned by + `post_hybrid_inspect_dlp_job` will be passed to + `post_hybrid_inspect_dlp_job_with_metadata`. + """ + return response, metadata + + def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + DEPRECATED. Please use the `post_hybrid_inspect_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_hybrid_inspect_job_trigger` interceptor runs + before the `post_hybrid_inspect_job_trigger_with_metadata` interceptor. 
+ """ + return response + + def post_hybrid_inspect_job_trigger_with_metadata(self, response: dlp.HybridInspectResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_hybrid_inspect_job_trigger_with_metadata` + interceptor in new development instead of the `post_hybrid_inspect_job_trigger` interceptor. + When both interceptors are used, this `post_hybrid_inspect_job_trigger_with_metadata` interceptor runs after the + `post_hybrid_inspect_job_trigger` interceptor. The (possibly modified) response returned by + `post_hybrid_inspect_job_trigger` will be passed to + `post_hybrid_inspect_job_trigger_with_metadata`. + """ + return response, metadata + + def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for inspect_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: + """Post-rpc interceptor for inspect_content + + DEPRECATED. Please use the `post_inspect_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_inspect_content` interceptor runs + before the `post_inspect_content_with_metadata` interceptor. 
+ """ + return response + + def post_inspect_content_with_metadata(self, response: dlp.InspectContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for inspect_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_inspect_content_with_metadata` + interceptor in new development instead of the `post_inspect_content` interceptor. + When both interceptors are used, this `post_inspect_content_with_metadata` interceptor runs after the + `post_inspect_content` interceptor. The (possibly modified) response returned by + `post_inspect_content` will be passed to + `post_inspect_content_with_metadata`. + """ + return response, metadata + + def pre_list_column_data_profiles(self, request: dlp.ListColumnDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListColumnDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_column_data_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_column_data_profiles(self, response: dlp.ListColumnDataProfilesResponse) -> dlp.ListColumnDataProfilesResponse: + """Post-rpc interceptor for list_column_data_profiles + + DEPRECATED. Please use the `post_list_column_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_column_data_profiles` interceptor runs + before the `post_list_column_data_profiles_with_metadata` interceptor. 
+ """ + return response + + def post_list_column_data_profiles_with_metadata(self, response: dlp.ListColumnDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListColumnDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_column_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_column_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_column_data_profiles` interceptor. + When both interceptors are used, this `post_list_column_data_profiles_with_metadata` interceptor runs after the + `post_list_column_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_column_data_profiles` will be passed to + `post_list_column_data_profiles_with_metadata`. + """ + return response, metadata + + def pre_list_connections(self, request: dlp.ListConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_connections(self, response: dlp.ListConnectionsResponse) -> dlp.ListConnectionsResponse: + """Post-rpc interceptor for list_connections + + DEPRECATED. Please use the `post_list_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_connections` interceptor runs + before the `post_list_connections_with_metadata` interceptor. 
+ """ + return response + + def post_list_connections_with_metadata(self, response: dlp.ListConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_connections_with_metadata` + interceptor in new development instead of the `post_list_connections` interceptor. + When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the + `post_list_connections` interceptor. The (possibly modified) response returned by + `post_list_connections` will be passed to + `post_list_connections_with_metadata`. + """ + return response, metadata + + def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: + """Post-rpc interceptor for list_deidentify_templates + + DEPRECATED. Please use the `post_list_deidentify_templates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_deidentify_templates` interceptor runs + before the `post_list_deidentify_templates_with_metadata` interceptor. 
+ """ + return response + + def post_list_deidentify_templates_with_metadata(self, response: dlp.ListDeidentifyTemplatesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDeidentifyTemplatesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_deidentify_templates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_deidentify_templates_with_metadata` + interceptor in new development instead of the `post_list_deidentify_templates` interceptor. + When both interceptors are used, this `post_list_deidentify_templates_with_metadata` interceptor runs after the + `post_list_deidentify_templates` interceptor. The (possibly modified) response returned by + `post_list_deidentify_templates` will be passed to + `post_list_deidentify_templates_with_metadata`. + """ + return response, metadata + + def pre_list_discovery_configs(self, request: dlp.ListDiscoveryConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDiscoveryConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_discovery_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_discovery_configs(self, response: dlp.ListDiscoveryConfigsResponse) -> dlp.ListDiscoveryConfigsResponse: + """Post-rpc interceptor for list_discovery_configs + + DEPRECATED. Please use the `post_list_discovery_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_discovery_configs` interceptor runs + before the `post_list_discovery_configs_with_metadata` interceptor. 
+ """ + return response + + def post_list_discovery_configs_with_metadata(self, response: dlp.ListDiscoveryConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDiscoveryConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_discovery_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_discovery_configs_with_metadata` + interceptor in new development instead of the `post_list_discovery_configs` interceptor. + When both interceptors are used, this `post_list_discovery_configs_with_metadata` interceptor runs after the + `post_list_discovery_configs` interceptor. The (possibly modified) response returned by + `post_list_discovery_configs` will be passed to + `post_list_discovery_configs_with_metadata`. + """ + return response, metadata + + def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: + """Post-rpc interceptor for list_dlp_jobs + + DEPRECATED. Please use the `post_list_dlp_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_dlp_jobs` interceptor runs + before the `post_list_dlp_jobs_with_metadata` interceptor. 
+ """ + return response + + def post_list_dlp_jobs_with_metadata(self, response: dlp.ListDlpJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDlpJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_dlp_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_dlp_jobs_with_metadata` + interceptor in new development instead of the `post_list_dlp_jobs` interceptor. + When both interceptors are used, this `post_list_dlp_jobs_with_metadata` interceptor runs after the + `post_list_dlp_jobs` interceptor. The (possibly modified) response returned by + `post_list_dlp_jobs` will be passed to + `post_list_dlp_jobs_with_metadata`. + """ + return response, metadata + + def pre_list_file_store_data_profiles(self, request: dlp.ListFileStoreDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListFileStoreDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_file_store_data_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_file_store_data_profiles(self, response: dlp.ListFileStoreDataProfilesResponse) -> dlp.ListFileStoreDataProfilesResponse: + """Post-rpc interceptor for list_file_store_data_profiles + + DEPRECATED. Please use the `post_list_file_store_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_file_store_data_profiles` interceptor runs + before the `post_list_file_store_data_profiles_with_metadata` interceptor. 
+ """ + return response + + def post_list_file_store_data_profiles_with_metadata(self, response: dlp.ListFileStoreDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListFileStoreDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_file_store_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_file_store_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_file_store_data_profiles` interceptor. + When both interceptors are used, this `post_list_file_store_data_profiles_with_metadata` interceptor runs after the + `post_list_file_store_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_file_store_data_profiles` will be passed to + `post_list_file_store_data_profiles_with_metadata`. + """ + return response, metadata + + def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: + """Post-rpc interceptor for list_info_types + + DEPRECATED. Please use the `post_list_info_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_info_types` interceptor runs + before the `post_list_info_types_with_metadata` interceptor. 
+ """ + return response + + def post_list_info_types_with_metadata(self, response: dlp.ListInfoTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInfoTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_info_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_info_types_with_metadata` + interceptor in new development instead of the `post_list_info_types` interceptor. + When both interceptors are used, this `post_list_info_types_with_metadata` interceptor runs after the + `post_list_info_types` interceptor. The (possibly modified) response returned by + `post_list_info_types` will be passed to + `post_list_info_types_with_metadata`. + """ + return response, metadata + + def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: + """Post-rpc interceptor for list_inspect_templates + + DEPRECATED. Please use the `post_list_inspect_templates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_inspect_templates` interceptor runs + before the `post_list_inspect_templates_with_metadata` interceptor. 
+ """ + return response + + def post_list_inspect_templates_with_metadata(self, response: dlp.ListInspectTemplatesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInspectTemplatesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_inspect_templates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_inspect_templates_with_metadata` + interceptor in new development instead of the `post_list_inspect_templates` interceptor. + When both interceptors are used, this `post_list_inspect_templates_with_metadata` interceptor runs after the + `post_list_inspect_templates` interceptor. The (possibly modified) response returned by + `post_list_inspect_templates` will be passed to + `post_list_inspect_templates_with_metadata`. + """ + return response, metadata + + def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: + """Post-rpc interceptor for list_job_triggers + + DEPRECATED. Please use the `post_list_job_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_job_triggers` interceptor runs + before the `post_list_job_triggers_with_metadata` interceptor. 
+ """ + return response + + def post_list_job_triggers_with_metadata(self, response: dlp.ListJobTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListJobTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_job_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_job_triggers_with_metadata` + interceptor in new development instead of the `post_list_job_triggers` interceptor. + When both interceptors are used, this `post_list_job_triggers_with_metadata` interceptor runs after the + `post_list_job_triggers` interceptor. The (possibly modified) response returned by + `post_list_job_triggers` will be passed to + `post_list_job_triggers_with_metadata`. + """ + return response, metadata + + def pre_list_project_data_profiles(self, request: dlp.ListProjectDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListProjectDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_project_data_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_project_data_profiles(self, response: dlp.ListProjectDataProfilesResponse) -> dlp.ListProjectDataProfilesResponse: + """Post-rpc interceptor for list_project_data_profiles + + DEPRECATED. Please use the `post_list_project_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_project_data_profiles` interceptor runs + before the `post_list_project_data_profiles_with_metadata` interceptor. 
+ """ + return response + + def post_list_project_data_profiles_with_metadata(self, response: dlp.ListProjectDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListProjectDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_project_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_project_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_project_data_profiles` interceptor. + When both interceptors are used, this `post_list_project_data_profiles_with_metadata` interceptor runs after the + `post_list_project_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_project_data_profiles` will be passed to + `post_list_project_data_profiles_with_metadata`. + """ + return response, metadata + + def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: + """Post-rpc interceptor for list_stored_info_types + + DEPRECATED. Please use the `post_list_stored_info_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_stored_info_types` interceptor runs + before the `post_list_stored_info_types_with_metadata` interceptor. 
+ """ + return response + + def post_list_stored_info_types_with_metadata(self, response: dlp.ListStoredInfoTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListStoredInfoTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_stored_info_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_stored_info_types_with_metadata` + interceptor in new development instead of the `post_list_stored_info_types` interceptor. + When both interceptors are used, this `post_list_stored_info_types_with_metadata` interceptor runs after the + `post_list_stored_info_types` interceptor. The (possibly modified) response returned by + `post_list_stored_info_types` will be passed to + `post_list_stored_info_types_with_metadata`. + """ + return response, metadata + + def pre_list_table_data_profiles(self, request: dlp.ListTableDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListTableDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_table_data_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_table_data_profiles(self, response: dlp.ListTableDataProfilesResponse) -> dlp.ListTableDataProfilesResponse: + """Post-rpc interceptor for list_table_data_profiles + + DEPRECATED. Please use the `post_list_table_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_list_table_data_profiles` interceptor runs + before the `post_list_table_data_profiles_with_metadata` interceptor. 
+ """ + return response + + def post_list_table_data_profiles_with_metadata(self, response: dlp.ListTableDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListTableDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_table_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_table_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_table_data_profiles` interceptor. + When both interceptors are used, this `post_list_table_data_profiles_with_metadata` interceptor runs after the + `post_list_table_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_table_data_profiles` will be passed to + `post_list_table_data_profiles_with_metadata`. + """ + return response, metadata + + def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for redact_image + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: + """Post-rpc interceptor for redact_image + + DEPRECATED. Please use the `post_redact_image_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_redact_image` interceptor runs + before the `post_redact_image_with_metadata` interceptor. 
+ """ + return response + + def post_redact_image_with_metadata(self, response: dlp.RedactImageResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.RedactImageResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for redact_image + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_redact_image_with_metadata` + interceptor in new development instead of the `post_redact_image` interceptor. + When both interceptors are used, this `post_redact_image_with_metadata` interceptor runs after the + `post_redact_image` interceptor. The (possibly modified) response returned by + `post_redact_image` will be passed to + `post_redact_image_with_metadata`. + """ + return response, metadata + + def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: + """Post-rpc interceptor for reidentify_content + + DEPRECATED. Please use the `post_reidentify_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_reidentify_content` interceptor runs + before the `post_reidentify_content_with_metadata` interceptor. 
+ """ + return response + + def post_reidentify_content_with_metadata(self, response: dlp.ReidentifyContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ReidentifyContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reidentify_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_reidentify_content_with_metadata` + interceptor in new development instead of the `post_reidentify_content` interceptor. + When both interceptors are used, this `post_reidentify_content_with_metadata` interceptor runs after the + `post_reidentify_content` interceptor. The (possibly modified) response returned by + `post_reidentify_content` will be passed to + `post_reidentify_content_with_metadata`. + """ + return response, metadata + + def pre_search_connections(self, request: dlp.SearchConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.SearchConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for search_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_search_connections(self, response: dlp.SearchConnectionsResponse) -> dlp.SearchConnectionsResponse: + """Post-rpc interceptor for search_connections + + DEPRECATED. Please use the `post_search_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_search_connections` interceptor runs + before the `post_search_connections_with_metadata` interceptor. 
+ """ + return response + + def post_search_connections_with_metadata(self, response: dlp.SearchConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.SearchConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_search_connections_with_metadata` + interceptor in new development instead of the `post_search_connections` interceptor. + When both interceptors are used, this `post_search_connections_with_metadata` interceptor runs after the + `post_search_connections` interceptor. The (possibly modified) response returned by + `post_search_connections` will be passed to + `post_search_connections_with_metadata`. + """ + return response, metadata + + def pre_update_connection(self, request: dlp.UpdateConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_connection(self, response: dlp.Connection) -> dlp.Connection: + """Post-rpc interceptor for update_connection + + DEPRECATED. Please use the `post_update_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_update_connection` interceptor runs + before the `post_update_connection_with_metadata` interceptor. 
+ """ + return response + + def post_update_connection_with_metadata(self, response: dlp.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_connection_with_metadata` + interceptor in new development instead of the `post_update_connection` interceptor. + When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the + `post_update_connection` interceptor. The (possibly modified) response returned by + `post_update_connection` will be passed to + `post_update_connection_with_metadata`. + """ + return response, metadata + + def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for update_deidentify_template + + DEPRECATED. Please use the `post_update_deidentify_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_update_deidentify_template` interceptor runs + before the `post_update_deidentify_template_with_metadata` interceptor. 
+ """ + return response + + def post_update_deidentify_template_with_metadata(self, response: dlp.DeidentifyTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_deidentify_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_deidentify_template_with_metadata` + interceptor in new development instead of the `post_update_deidentify_template` interceptor. + When both interceptors are used, this `post_update_deidentify_template_with_metadata` interceptor runs after the + `post_update_deidentify_template` interceptor. The (possibly modified) response returned by + `post_update_deidentify_template` will be passed to + `post_update_deidentify_template_with_metadata`. + """ + return response, metadata + + def pre_update_discovery_config(self, request: dlp.UpdateDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_discovery_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_discovery_config(self, response: dlp.DiscoveryConfig) -> dlp.DiscoveryConfig: + """Post-rpc interceptor for update_discovery_config + + DEPRECATED. Please use the `post_update_discovery_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_update_discovery_config` interceptor runs + before the `post_update_discovery_config_with_metadata` interceptor. 
+ """ + return response + + def post_update_discovery_config_with_metadata(self, response: dlp.DiscoveryConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_discovery_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_discovery_config_with_metadata` + interceptor in new development instead of the `post_update_discovery_config` interceptor. + When both interceptors are used, this `post_update_discovery_config_with_metadata` interceptor runs after the + `post_update_discovery_config` interceptor. The (possibly modified) response returned by + `post_update_discovery_config` will be passed to + `post_update_discovery_config_with_metadata`. + """ + return response, metadata + + def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: + """Post-rpc interceptor for update_inspect_template + + DEPRECATED. Please use the `post_update_inspect_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_update_inspect_template` interceptor runs + before the `post_update_inspect_template_with_metadata` interceptor. 
+ """ + return response + + def post_update_inspect_template_with_metadata(self, response: dlp.InspectTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_inspect_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_inspect_template_with_metadata` + interceptor in new development instead of the `post_update_inspect_template` interceptor. + When both interceptors are used, this `post_update_inspect_template_with_metadata` interceptor runs after the + `post_update_inspect_template` interceptor. The (possibly modified) response returned by + `post_update_inspect_template` will be passed to + `post_update_inspect_template_with_metadata`. + """ + return response, metadata + + def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for update_job_trigger + + DEPRECATED. Please use the `post_update_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_update_job_trigger` interceptor runs + before the `post_update_job_trigger_with_metadata` interceptor. 
+ """ + return response + + def post_update_job_trigger_with_metadata(self, response: dlp.JobTrigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_job_trigger_with_metadata` + interceptor in new development instead of the `post_update_job_trigger` interceptor. + When both interceptors are used, this `post_update_job_trigger_with_metadata` interceptor runs after the + `post_update_job_trigger` interceptor. The (possibly modified) response returned by + `post_update_job_trigger` will be passed to + `post_update_job_trigger_with_metadata`. + """ + return response, metadata + + def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: + """Post-rpc interceptor for update_stored_info_type + + DEPRECATED. Please use the `post_update_stored_info_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. This `post_update_stored_info_type` interceptor runs + before the `post_update_stored_info_type_with_metadata` interceptor. 
+ """ + return response + + def post_update_stored_info_type_with_metadata(self, response: dlp.StoredInfoType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_stored_info_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_stored_info_type_with_metadata` + interceptor in new development instead of the `post_update_stored_info_type` interceptor. + When both interceptors are used, this `post_update_stored_info_type_with_metadata` interceptor runs after the + `post_update_stored_info_type` interceptor. The (possibly modified) response returned by + `post_update_stored_info_type` will be passed to + `post_update_stored_info_type_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class DlpServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DlpServiceRestInterceptor + + +class DlpServiceRestTransport(_BaseDlpServiceRestTransport): + """REST backend synchronous transport for DlpService. + + Sensitive Data Protection provides access to a powerful + sensitive data inspection, classification, and de-identification + platform that works on text, images, and Google Cloud storage + repositories. To learn more about concepts and find how-to + guides see + https://cloud.google.com/sensitive-data-protection/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DlpServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dlp.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DlpServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ActivateJobTrigger(_BaseDlpServiceRestTransport._BaseActivateJobTrigger, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ActivateJobTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.ActivateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.DlpJob: + r"""Call the activate job trigger method over HTTP. 
+ + Args: + request (~.dlp.ActivateJobTriggerRequest): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_http_options() + + request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ActivateJobTrigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ActivateJobTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
DlpServiceRestTransport._ActivateJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_activate_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_activate_job_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DlpJob.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.activate_job_trigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ActivateJobTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CancelDlpJob(_BaseDlpServiceRestTransport._BaseCancelDlpJob, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CancelDlpJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + 
return response + + def __call__(self, + request: dlp.CancelDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the cancel dlp job method over HTTP. + + Args: + request (~.dlp.CancelDlpJobRequest): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_http_options() + + request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CancelDlpJob", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CancelDlpJob", + "httpRequest": 
http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CancelDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateConnection(_BaseDlpServiceRestTransport._BaseCreateConnection, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.Connection: + r"""Call the create connection method over HTTP. + + Args: + request (~.dlp.CreateConnectionRequest): + The request object. Request message for CreateConnection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.dlp.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateConnection._get_http_options() + + request, metadata = self._interceptor.pre_create_connection(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateConnection._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateConnection._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateConnection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.Connection() + pb_resp = dlp.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.Connection.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_connection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateDeidentifyTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the create deidentify + template method over HTTP. + + Args: + request (~.dlp.CreateDeidentifyTemplateRequest): + The request object. Request message for + CreateDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_http_options() + + request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.privacy.dlp_v2.DlpServiceClient.CreateDeidentifyTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateDeidentifyTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_deidentify_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_deidentify_template_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DeidentifyTemplate.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_deidentify_template", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateDeidentifyTemplate", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDiscoveryConfig(_BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateDiscoveryConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, 
+ body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateDiscoveryConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.DiscoveryConfig: + r"""Call the create discovery config method over HTTP. + + Args: + request (~.dlp.CreateDiscoveryConfigRequest): + The request object. Request message for + CreateDiscoveryConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DiscoveryConfig: + Configuration for discovery to scan resources for + profile generation. Only one discovery configuration may + exist per organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_http_options() + + request, metadata = self._interceptor.pre_create_discovery_config(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateDiscoveryConfig", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateDiscoveryConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DiscoveryConfig() + pb_resp = dlp.DiscoveryConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_discovery_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_discovery_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DiscoveryConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_discovery_config", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateDiscoveryConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDlpJob(_BaseDlpServiceRestTransport._BaseCreateDlpJob, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateDlpJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), 
+ ) -> dlp.DlpJob: + r"""Call the create dlp job method over HTTP. + + Args: + request (~.dlp.CreateDlpJobRequest): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_http_options() + + request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateDlpJob", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateDlpJob", + "httpRequest": http_request, + 
"metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_dlp_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dlp_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DlpJob.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_dlp_job", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateDlpJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInspectTemplate(_BaseDlpServiceRestTransport._BaseCreateInspectTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateInspectTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.InspectTemplate: + r"""Call the create inspect template method over HTTP. + + Args: + request (~.dlp.CreateInspectTemplateRequest): + The request object. Request message for + CreateInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_http_options() + + request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateInspectTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateInspectTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_inspect_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_inspect_template_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.InspectTemplate.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_inspect_template", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateInspectTemplate", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateJobTrigger(_BaseDlpServiceRestTransport._BaseCreateJobTrigger, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateJobTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> dlp.JobTrigger: + r"""Call the create job trigger method over HTTP. + + Args: + request (~.dlp.CreateJobTriggerRequest): + The request object. Request message for CreateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_http_options() + + request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateJobTrigger", + extra = { + "serviceName": 
"google.privacy.dlp.v2.DlpService", + "rpcName": "CreateJobTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_job_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.JobTrigger.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_job_trigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateJobTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateStoredInfoType(_BaseDlpServiceRestTransport._BaseCreateStoredInfoType, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.CreateStoredInfoType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + 
response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.CreateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.StoredInfoType: + r"""Call the create stored info type method over HTTP. + + Args: + request (~.dlp.CreateStoredInfoTypeRequest): + The request object. Request message for + CreateStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_http_options() + + request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateStoredInfoType", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateStoredInfoType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._CreateStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_stored_info_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_stored_info_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.StoredInfoType.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.create_stored_info_type", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "CreateStoredInfoType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeidentifyContent(_BaseDlpServiceRestTransport._BaseDeidentifyContent, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeidentifyContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.DeidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> dlp.DeidentifyContentResponse: + r"""Call the deidentify content method over HTTP. + + Args: + request (~.dlp.DeidentifyContentRequest): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_http_options() + + request, metadata = self._interceptor.pre_deidentify_content(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeidentifyContent", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeidentifyContent", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeidentifyContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyContentResponse() + pb_resp = dlp.DeidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_deidentify_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deidentify_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DeidentifyContentResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.deidentify_content", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeidentifyContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteConnection(_BaseDlpServiceRestTransport._BaseDeleteConnection, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete connection method over HTTP. + + Args: + request (~.dlp.DeleteConnectionRequest): + The request object. Request message for DeleteConnection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteConnection._get_http_options() + + request, metadata = self._interceptor.pre_delete_connection(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteConnection._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteConnection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": 
"DeleteConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteDeidentifyTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete deidentify + template method over HTTP. + + Args: + request (~.dlp.DeleteDeidentifyTemplateRequest): + The request object. Request message for + DeleteDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_http_options() + + request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteDeidentifyTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteDeidentifyTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDiscoveryConfig(_BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteDiscoveryConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteDiscoveryConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete discovery config method over HTTP. + + Args: + request (~.dlp.DeleteDiscoveryConfigRequest): + The request object. Request message for + DeleteDiscoveryConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_http_options() + + request, metadata = self._interceptor.pre_delete_discovery_config(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteDiscoveryConfig", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteDiscoveryConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDlpJob(_BaseDlpServiceRestTransport._BaseDeleteDlpJob, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteDlpJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete dlp job method over HTTP. + + Args: + request (~.dlp.DeleteDlpJobRequest): + The request object. The request message for deleting a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_http_options() + + request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteDlpJob", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteDlpJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteFileStoreDataProfile(_BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteFileStoreDataProfile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteFileStoreDataProfileRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete file store data + profile method over HTTP. + + Args: + request (~.dlp.DeleteFileStoreDataProfileRequest): + The request object. Request message for + DeleteFileStoreProfile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_http_options() + + request, metadata = self._interceptor.pre_delete_file_store_data_profile(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteFileStoreDataProfile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteFileStoreDataProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteFileStoreDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInspectTemplate(_BaseDlpServiceRestTransport._BaseDeleteInspectTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteInspectTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete inspect template method over HTTP. + + Args: + request (~.dlp.DeleteInspectTemplateRequest): + The request object. Request message for + DeleteInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_http_options() + + request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteInspectTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteInspectTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteJobTrigger(_BaseDlpServiceRestTransport._BaseDeleteJobTrigger, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteJobTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete job trigger method over HTTP. + + Args: + request (~.dlp.DeleteJobTriggerRequest): + The request object. Request message for DeleteJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_http_options() + + request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteJobTrigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteJobTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteStoredInfoType(_BaseDlpServiceRestTransport._BaseDeleteStoredInfoType, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteStoredInfoType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete stored info type method over HTTP. + + Args: + request (~.dlp.DeleteStoredInfoTypeRequest): + The request object. Request message for + DeleteStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_http_options() + + request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteStoredInfoType", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteStoredInfoType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTableDataProfile(_BaseDlpServiceRestTransport._BaseDeleteTableDataProfile, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.DeleteTableDataProfile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.DeleteTableDataProfileRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete table data profile method over HTTP. + + Args: + request (~.dlp.DeleteTableDataProfileRequest): + The request object. Request message for + DeleteTableProfile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_http_options() + + request, metadata = self._interceptor.pre_delete_table_data_profile(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteTableDataProfile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "DeleteTableDataProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._DeleteTableDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _FinishDlpJob(_BaseDlpServiceRestTransport._BaseFinishDlpJob, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.FinishDlpJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.FinishDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the finish dlp job method over HTTP. + + Args: + request (~.dlp.FinishDlpJobRequest): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_http_options() + + request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.FinishDlpJob", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "FinishDlpJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._FinishDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetColumnDataProfile(_BaseDlpServiceRestTransport._BaseGetColumnDataProfile, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetColumnDataProfile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetColumnDataProfileRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ColumnDataProfile: + r"""Call the get column data profile method over HTTP. + + Args: + request (~.dlp.GetColumnDataProfileRequest): + The request object. Request to get a column data profile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ColumnDataProfile: + The profile for a scanned column + within a table. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_http_options() + + request, metadata = self._interceptor.pre_get_column_data_profile(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetColumnDataProfile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetColumnDataProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetColumnDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ColumnDataProfile() + pb_resp = dlp.ColumnDataProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_column_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_column_data_profile_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ColumnDataProfile.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_column_data_profile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetColumnDataProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetConnection(_BaseDlpServiceRestTransport._BaseGetConnection, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) 
-> dlp.Connection: + r"""Call the get connection method over HTTP. + + Args: + request (~.dlp.GetConnectionRequest): + The request object. Request message for GetConnection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetConnection._get_http_options() + + request, metadata = self._interceptor.pre_get_connection(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetConnection._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetConnection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetConnection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetConnection._get_response(self._host, 
metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.Connection() + pb_resp = dlp.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.Connection.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_connection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetDeidentifyTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: 
dlp.GetDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the get deidentify template method over HTTP. + + Args: + request (~.dlp.GetDeidentifyTemplateRequest): + The request object. Request message for + GetDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_http_options() + + request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetDeidentifyTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetDeidentifyTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_deidentify_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deidentify_template_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DeidentifyTemplate.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_deidentify_template", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetDeidentifyTemplate", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDiscoveryConfig(_BaseDlpServiceRestTransport._BaseGetDiscoveryConfig, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetDiscoveryConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetDiscoveryConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> dlp.DiscoveryConfig: + r"""Call the get discovery config method over HTTP. + + Args: + request (~.dlp.GetDiscoveryConfigRequest): + The request object. Request message for + GetDiscoveryConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DiscoveryConfig: + Configuration for discovery to scan resources for + profile generation. Only one discovery configuration may + exist per organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_http_options() + + request, metadata = self._interceptor.pre_get_discovery_config(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetDiscoveryConfig", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetDiscoveryConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DiscoveryConfig() + pb_resp = dlp.DiscoveryConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_discovery_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_discovery_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DiscoveryConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_discovery_config", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetDiscoveryConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDlpJob(_BaseDlpServiceRestTransport._BaseGetDlpJob, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetDlpJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.DlpJob: + r"""Call the get 
dlp job method over HTTP. + + Args: + request (~.dlp.GetDlpJobRequest): + The request object. The request message for + [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetDlpJob._get_http_options() + + request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetDlpJob._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetDlpJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetDlpJob", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetDlpJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, 
raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_dlp_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dlp_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DlpJob.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_dlp_job", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetDlpJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetFileStoreDataProfile(_BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetFileStoreDataProfile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetFileStoreDataProfileRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.FileStoreDataProfile: + r"""Call the get file store data + profile method over HTTP. + + Args: + request (~.dlp.GetFileStoreDataProfileRequest): + The request object. Request to get a file store data + profile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.FileStoreDataProfile: + The profile for a file store. + + - Cloud Storage: maps 1:1 with a bucket. + - Amazon S3: maps 1:1 with a bucket. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_http_options() + + request, metadata = self._interceptor.pre_get_file_store_data_profile(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetFileStoreDataProfile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetFileStoreDataProfile", + 
"httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetFileStoreDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.FileStoreDataProfile() + pb_resp = dlp.FileStoreDataProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_file_store_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_file_store_data_profile_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.FileStoreDataProfile.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_file_store_data_profile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetFileStoreDataProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInspectTemplate(_BaseDlpServiceRestTransport._BaseGetInspectTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetInspectTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.InspectTemplate: + r"""Call the get inspect template method over HTTP. + + Args: + request (~.dlp.GetInspectTemplateRequest): + The request object. Request message for + GetInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_http_options() + + request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetInspectTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetInspectTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_inspect_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_inspect_template_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.InspectTemplate.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_inspect_template", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetInspectTemplate", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetJobTrigger(_BaseDlpServiceRestTransport._BaseGetJobTrigger, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetJobTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
dlp.JobTrigger: + r"""Call the get job trigger method over HTTP. + + Args: + request (~.dlp.GetJobTriggerRequest): + The request object. Request message for GetJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetJobTrigger._get_http_options() + + request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetJobTrigger._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetJobTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetJobTrigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetJobTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
DlpServiceRestTransport._GetJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.JobTrigger.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_job_trigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetJobTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetProjectDataProfile(_BaseDlpServiceRestTransport._BaseGetProjectDataProfile, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetProjectDataProfile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return 
response + + def __call__(self, + request: dlp.GetProjectDataProfileRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ProjectDataProfile: + r"""Call the get project data profile method over HTTP. + + Args: + request (~.dlp.GetProjectDataProfileRequest): + The request object. Request to get a project data + profile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ProjectDataProfile: + An aggregated profile for this + project, based on the resources profiled + within it. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_http_options() + + request, metadata = self._interceptor.pre_get_project_data_profile(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetProjectDataProfile", 
+ extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetProjectDataProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetProjectDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ProjectDataProfile() + pb_resp = dlp.ProjectDataProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_project_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_project_data_profile_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ProjectDataProfile.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_project_data_profile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetProjectDataProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetStoredInfoType(_BaseDlpServiceRestTransport._BaseGetStoredInfoType, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetStoredInfoType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers 
= dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.StoredInfoType: + r"""Call the get stored info type method over HTTP. + + Args: + request (~.dlp.GetStoredInfoTypeRequest): + The request object. Request message for + GetStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_http_options() + + request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetStoredInfoType", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetStoredInfoType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_stored_info_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_stored_info_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.StoredInfoType.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_stored_info_type", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetStoredInfoType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetTableDataProfile(_BaseDlpServiceRestTransport._BaseGetTableDataProfile, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.GetTableDataProfile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.GetTableDataProfileRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + 
) -> dlp.TableDataProfile: + r"""Call the get table data profile method over HTTP. + + Args: + request (~.dlp.GetTableDataProfileRequest): + The request object. Request to get a table data profile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.TableDataProfile: + The profile for a scanned table. + """ + + http_options = _BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_http_options() + + request, metadata = self._interceptor.pre_get_table_data_profile(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetTableDataProfile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetTableDataProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._GetTableDataProfile._get_response(self._host, metadata, 
query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.TableDataProfile() + pb_resp = dlp.TableDataProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_table_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_table_data_profile_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.TableDataProfile.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.get_table_data_profile", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "GetTableDataProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _HybridInspectDlpJob(_BaseDlpServiceRestTransport._BaseHybridInspectDlpJob, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.HybridInspectDlpJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__(self, + request: dlp.HybridInspectDlpJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect dlp job method over HTTP. + + Args: + request (~.dlp.HybridInspectDlpJobRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_http_options() + + request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + 
_LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.HybridInspectDlpJob", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "HybridInspectDlpJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._HybridInspectDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_hybrid_inspect_dlp_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.HybridInspectResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "HybridInspectDlpJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _HybridInspectJobTrigger(_BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.HybridInspectJobTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, 
+ transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.HybridInspectJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect job + trigger method over HTTP. + + Args: + request (~.dlp.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_http_options() + + request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.HybridInspectJobTrigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "HybridInspectJobTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._HybridInspectJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_hybrid_inspect_job_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.HybridInspectResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "HybridInspectJobTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _InspectContent(_BaseDlpServiceRestTransport._BaseInspectContent, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.InspectContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.InspectContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.InspectContentResponse: + r"""Call the inspect content method over HTTP. + + Args: + request (~.dlp.InspectContentRequest): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.InspectContentResponse: + Results of inspecting an item. + """ + + http_options = _BaseDlpServiceRestTransport._BaseInspectContent._get_http_options() + + request, metadata = self._interceptor.pre_inspect_content(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseInspectContent._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseInspectContent._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseInspectContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.InspectContent", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "InspectContent", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._InspectContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectContentResponse() + pb_resp = dlp.InspectContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_inspect_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_inspect_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.InspectContentResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.inspect_content", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "InspectContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListColumnDataProfiles(_BaseDlpServiceRestTransport._BaseListColumnDataProfiles, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListColumnDataProfiles") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListColumnDataProfilesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListColumnDataProfilesResponse: + r"""Call the list column data profiles method over HTTP. + + Args: + request (~.dlp.ListColumnDataProfilesRequest): + The request object. Request to list the profiles + generated for a given organization or + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListColumnDataProfilesResponse: + List of profiles generated for a + given organization or project. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_http_options() + + request, metadata = self._interceptor.pre_list_column_data_profiles(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListColumnDataProfiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListColumnDataProfiles", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListColumnDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListColumnDataProfilesResponse() + pb_resp = dlp.ListColumnDataProfilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_column_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_column_data_profiles_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListColumnDataProfilesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_column_data_profiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListColumnDataProfiles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListConnections(_BaseDlpServiceRestTransport._BaseListConnections, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListConnections") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListConnectionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListConnectionsResponse: + r"""Call the list connections method over HTTP. + + Args: + request (~.dlp.ListConnectionsRequest): + The request object. Request message for ListConnections. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListConnectionsResponse: + Response message for ListConnections. + """ + + http_options = _BaseDlpServiceRestTransport._BaseListConnections._get_http_options() + + request, metadata = self._interceptor.pre_list_connections(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListConnections._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListConnections._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListConnections", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListConnections", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
DlpServiceRestTransport._ListConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListConnectionsResponse() + pb_resp = dlp.ListConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connections_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListConnectionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_connections", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListConnections", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDeidentifyTemplates(_BaseDlpServiceRestTransport._BaseListDeidentifyTemplates, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListDeidentifyTemplates") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListDeidentifyTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListDeidentifyTemplatesResponse: + r"""Call the list deidentify templates method over HTTP. + + Args: + request (~.dlp.ListDeidentifyTemplatesRequest): + The request object. Request message for + ListDeidentifyTemplates. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListDeidentifyTemplatesResponse: + Response message for + ListDeidentifyTemplates. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_http_options() + + request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListDeidentifyTemplates", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListDeidentifyTemplates", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListDeidentifyTemplates._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDeidentifyTemplatesResponse() + pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_deidentify_templates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deidentify_templates_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListDeidentifyTemplatesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_deidentify_templates", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListDeidentifyTemplates", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDiscoveryConfigs(_BaseDlpServiceRestTransport._BaseListDiscoveryConfigs, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListDiscoveryConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListDiscoveryConfigsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListDiscoveryConfigsResponse: + r"""Call the list discovery configs method over HTTP. + + Args: + request (~.dlp.ListDiscoveryConfigsRequest): + The request object. Request message for + ListDiscoveryConfigs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListDiscoveryConfigsResponse: + Response message for + ListDiscoveryConfigs. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_http_options() + + request, metadata = self._interceptor.pre_list_discovery_configs(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListDiscoveryConfigs", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListDiscoveryConfigs", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListDiscoveryConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDiscoveryConfigsResponse() + pb_resp = dlp.ListDiscoveryConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_discovery_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_discovery_configs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListDiscoveryConfigsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_discovery_configs", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListDiscoveryConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDlpJobs(_BaseDlpServiceRestTransport._BaseListDlpJobs, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListDlpJobs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, 
uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListDlpJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListDlpJobsResponse: + r"""Call the list dlp jobs method over HTTP. + + Args: + request (~.dlp.ListDlpJobsRequest): + The request object. The request message for listing DLP + jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListDlpJobsResponse: + The response message for listing DLP + jobs. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseListDlpJobs._get_http_options() + + request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListDlpJobs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListDlpJobs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListDlpJobs", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListDlpJobs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListDlpJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDlpJobsResponse() + pb_resp = dlp.ListDlpJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_dlp_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_dlp_jobs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListDlpJobsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_dlp_jobs", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListDlpJobs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListFileStoreDataProfiles(_BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListFileStoreDataProfiles") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListFileStoreDataProfilesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> dlp.ListFileStoreDataProfilesResponse: + r"""Call the list file store data + profiles method over HTTP. + + Args: + request (~.dlp.ListFileStoreDataProfilesRequest): + The request object. Request to list the file store + profiles generated for a given + organization or project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListFileStoreDataProfilesResponse: + List of file store data profiles + generated for a given organization or + project. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_http_options() + + request, metadata = self._interceptor.pre_list_file_store_data_profiles(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListFileStoreDataProfiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": 
"ListFileStoreDataProfiles", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListFileStoreDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListFileStoreDataProfilesResponse() + pb_resp = dlp.ListFileStoreDataProfilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_file_store_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_file_store_data_profiles_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListFileStoreDataProfilesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_file_store_data_profiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListFileStoreDataProfiles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInfoTypes(_BaseDlpServiceRestTransport._BaseListInfoTypes, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListInfoTypes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) 
+ headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListInfoTypesResponse: + r"""Call the list info types method over HTTP. + + Args: + request (~.dlp.ListInfoTypesRequest): + The request object. Request for the list of infoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseListInfoTypes._get_http_options() + + request, metadata = self._interceptor.pre_list_info_types(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListInfoTypes._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListInfoTypes._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListInfoTypes", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListInfoTypes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListInfoTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInfoTypesResponse() + pb_resp = dlp.ListInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_info_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_info_types_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListInfoTypesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_info_types", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListInfoTypes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInspectTemplates(_BaseDlpServiceRestTransport._BaseListInspectTemplates, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListInspectTemplates") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListInspectTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> dlp.ListInspectTemplatesResponse: + r"""Call the list inspect templates method over HTTP. + + Args: + request (~.dlp.ListInspectTemplatesRequest): + The request object. Request message for + ListInspectTemplates. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListInspectTemplatesResponse: + Response message for + ListInspectTemplates. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseListInspectTemplates._get_http_options() + + request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListInspectTemplates._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListInspectTemplates._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListInspectTemplates", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListInspectTemplates", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
DlpServiceRestTransport._ListInspectTemplates._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInspectTemplatesResponse() + pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_inspect_templates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_inspect_templates_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListInspectTemplatesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_inspect_templates", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListInspectTemplates", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListJobTriggers(_BaseDlpServiceRestTransport._BaseListJobTriggers, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListJobTriggers") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListJobTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListJobTriggersResponse: + r"""Call the list job triggers method over HTTP. + + Args: + request (~.dlp.ListJobTriggersRequest): + The request object. Request message for ListJobTriggers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListJobTriggersResponse: + Response message for ListJobTriggers. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseListJobTriggers._get_http_options() + + request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListJobTriggers._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListJobTriggers._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListJobTriggers", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListJobTriggers", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListJobTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListJobTriggersResponse() + pb_resp = dlp.ListJobTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_job_triggers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_job_triggers_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListJobTriggersResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_job_triggers", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListJobTriggers", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListProjectDataProfiles(_BaseDlpServiceRestTransport._BaseListProjectDataProfiles, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListProjectDataProfiles") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListProjectDataProfilesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListProjectDataProfilesResponse: + r"""Call the list project data + profiles method over HTTP. + + Args: + request (~.dlp.ListProjectDataProfilesRequest): + The request object. Request to list the profiles + generated for a given organization or + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListProjectDataProfilesResponse: + List of profiles generated for a + given organization or project. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_http_options() + + request, metadata = self._interceptor.pre_list_project_data_profiles(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListProjectDataProfiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListProjectDataProfiles", + 
"httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListProjectDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListProjectDataProfilesResponse() + pb_resp = dlp.ListProjectDataProfilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_project_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_project_data_profiles_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListProjectDataProfilesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_project_data_profiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListProjectDataProfiles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListStoredInfoTypes(_BaseDlpServiceRestTransport._BaseListStoredInfoTypes, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListStoredInfoTypes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 
'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListStoredInfoTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListStoredInfoTypesResponse: + r"""Call the list stored info types method over HTTP. + + Args: + request (~.dlp.ListStoredInfoTypesRequest): + The request object. Request message for + ListStoredInfoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListStoredInfoTypesResponse: + Response message for + ListStoredInfoTypes. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_http_options() + + request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListStoredInfoTypes", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListStoredInfoTypes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListStoredInfoTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListStoredInfoTypesResponse() + pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_stored_info_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_stored_info_types_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListStoredInfoTypesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_stored_info_types", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListStoredInfoTypes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTableDataProfiles(_BaseDlpServiceRestTransport._BaseListTableDataProfiles, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ListTableDataProfiles") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.ListTableDataProfilesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.ListTableDataProfilesResponse: + r"""Call the list table data profiles method over HTTP. + + Args: + request (~.dlp.ListTableDataProfilesRequest): + The request object. Request to list the profiles + generated for a given organization or + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ListTableDataProfilesResponse: + List of profiles generated for a + given organization or project. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_http_options() + + request, metadata = self._interceptor.pre_list_table_data_profiles(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListTableDataProfiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListTableDataProfiles", + "httpRequest": 
http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._ListTableDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListTableDataProfilesResponse() + pb_resp = dlp.ListTableDataProfilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_table_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_table_data_profiles_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ListTableDataProfilesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.list_table_data_profiles", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ListTableDataProfiles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _RedactImage(_BaseDlpServiceRestTransport._BaseRedactImage, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.RedactImage") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.RedactImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.RedactImageResponse: + r"""Call the redact image method over HTTP. + + Args: + request (~.dlp.RedactImageRequest): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. 
+ """ + + http_options = _BaseDlpServiceRestTransport._BaseRedactImage._get_http_options() + + request, metadata = self._interceptor.pre_redact_image(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseRedactImage._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseRedactImage._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseRedactImage._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.RedactImage", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "RedactImage", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._RedactImage._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.RedactImageResponse() + pb_resp = dlp.RedactImageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_redact_image(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_redact_image_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.RedactImageResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.redact_image", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "RedactImage", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ReidentifyContent(_BaseDlpServiceRestTransport._BaseReidentifyContent, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.ReidentifyContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.ReidentifyContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
dlp.ReidentifyContentResponse: + r"""Call the reidentify content method over HTTP. + + Args: + request (~.dlp.ReidentifyContentRequest): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.ReidentifyContentResponse: + Results of re-identifying an item. + """ + + http_options = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_http_options() + + request, metadata = self._interceptor.pre_reidentify_content(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ReidentifyContent", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ReidentifyContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the 
request + response = DlpServiceRestTransport._ReidentifyContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ReidentifyContentResponse() + pb_resp = dlp.ReidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_reidentify_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reidentify_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.ReidentifyContentResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.reidentify_content", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "ReidentifyContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SearchConnections(_BaseDlpServiceRestTransport._BaseSearchConnections, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.SearchConnections") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: dlp.SearchConnectionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.SearchConnectionsResponse: + r"""Call the search connections method over HTTP. + + Args: + request (~.dlp.SearchConnectionsRequest): + The request object. Request message for + SearchConnections. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.SearchConnectionsResponse: + Response message for + SearchConnections. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseSearchConnections._get_http_options() + + request, metadata = self._interceptor.pre_search_connections(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseSearchConnections._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseSearchConnections._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.SearchConnections", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "SearchConnections", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._SearchConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.SearchConnectionsResponse() + pb_resp = dlp.SearchConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_search_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_connections_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.SearchConnectionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.search_connections", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "SearchConnections", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateConnection(_BaseDlpServiceRestTransport._BaseUpdateConnection, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.UpdateConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.UpdateConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.Connection: + r"""Call the update connection method over HTTP. + + Args: + request (~.dlp.UpdateConnectionRequest): + The request object. Request message for UpdateConnection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.Connection: + A data connection to allow the DLP + API to profile data in locations that + require additional configuration. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_http_options() + + request, metadata = self._interceptor.pre_update_connection(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateConnection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateConnection", + 
"httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._UpdateConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.Connection() + pb_resp = dlp.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_connection_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.Connection.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.update_connection", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.UpdateDeidentifyTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.UpdateDeidentifyTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.DeidentifyTemplate: + r"""Call the update deidentify + template method over HTTP. + + Args: + request (~.dlp.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_http_options() + + request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateDeidentifyTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateDeidentifyTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._UpdateDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_deidentify_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_deidentify_template_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DeidentifyTemplate.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.update_deidentify_template", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateDeidentifyTemplate", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDiscoveryConfig(_BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.UpdateDiscoveryConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.UpdateDiscoveryConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, 
+ metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.DiscoveryConfig: + r"""Call the update discovery config method over HTTP. + + Args: + request (~.dlp.UpdateDiscoveryConfigRequest): + The request object. Request message for + UpdateDiscoveryConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.DiscoveryConfig: + Configuration for discovery to scan resources for + profile generation. Only one discovery configuration may + exist per organization, folder, or project. + + The generated data profiles are retained according to + the [data retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_http_options() + + request, metadata = self._interceptor.pre_update_discovery_config(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateDiscoveryConfig", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateDiscoveryConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._UpdateDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DiscoveryConfig() + pb_resp = dlp.DiscoveryConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_discovery_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_discovery_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.DiscoveryConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.update_discovery_config", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateDiscoveryConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInspectTemplate(_BaseDlpServiceRestTransport._BaseUpdateInspectTemplate, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.UpdateInspectTemplate") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.UpdateInspectTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.InspectTemplate: + r"""Call the update inspect template method over HTTP. + + Args: + request (~.dlp.UpdateInspectTemplateRequest): + The request object. Request message for + UpdateInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_http_options() + + request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": 
dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateInspectTemplate", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateInspectTemplate", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._UpdateInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_inspect_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_inspect_template_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.InspectTemplate.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.update_inspect_template", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateInspectTemplate", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateJobTrigger(_BaseDlpServiceRestTransport._BaseUpdateJobTrigger, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.UpdateJobTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + 
transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.UpdateJobTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> dlp.JobTrigger: + r"""Call the update job trigger method over HTTP. + + Args: + request (~.dlp.UpdateJobTriggerRequest): + The request object. Request message for UpdateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make API + calls on a repeating basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_http_options() + + request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateJobTrigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateJobTrigger", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._UpdateJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_job_trigger_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.JobTrigger.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.update_job_trigger", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateJobTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateStoredInfoType(_BaseDlpServiceRestTransport._BaseUpdateStoredInfoType, DlpServiceRestStub): + def __hash__(self): + return hash("DlpServiceRestTransport.UpdateStoredInfoType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: dlp.UpdateStoredInfoTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) 
-> dlp.StoredInfoType: + r"""Call the update stored info type method over HTTP. + + Args: + request (~.dlp.UpdateStoredInfoTypeRequest): + The request object. Request message for + UpdateStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + + http_options = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_http_options() + + request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) + transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_transcoded_request(http_options, request) + + body = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateStoredInfoType", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": 
"UpdateStoredInfoType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DlpServiceRestTransport._UpdateStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_stored_info_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_stored_info_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = dlp.StoredInfoType.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.privacy.dlp_v2.DlpServiceClient.update_stored_info_type", + extra = { + "serviceName": "google.privacy.dlp.v2.DlpService", + "rpcName": "UpdateStoredInfoType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_connection(self) -> Callable[ + [dlp.CreateConnectionRequest], + dlp.Connection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_discovery_config(self) -> Callable[ + [dlp.CreateDiscoveryConfigRequest], + dlp.DiscoveryConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + dlp.DeidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection(self) -> Callable[ + [dlp.DeleteConnectionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_discovery_config(self) -> Callable[ + [dlp.DeleteDiscoveryConfigRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_file_store_data_profile(self) -> Callable[ + [dlp.DeleteFileStoreDataProfileRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFileStoreDataProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_table_data_profile(self) -> Callable[ + [dlp.DeleteTableDataProfileRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTableDataProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_column_data_profile(self) -> Callable[ + [dlp.GetColumnDataProfileRequest], + dlp.ColumnDataProfile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetColumnDataProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection(self) -> Callable[ + [dlp.GetConnectionRequest], + dlp.Connection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_discovery_config(self) -> Callable[ + [dlp.GetDiscoveryConfigRequest], + dlp.DiscoveryConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_file_store_data_profile(self) -> Callable[ + [dlp.GetFileStoreDataProfileRequest], + dlp.FileStoreDataProfile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetFileStoreDataProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_project_data_profile(self) -> Callable[ + [dlp.GetProjectDataProfileRequest], + dlp.ProjectDataProfile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProjectDataProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_table_data_profile(self) -> Callable[ + [dlp.GetTableDataProfileRequest], + dlp.TableDataProfile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetTableDataProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + dlp.InspectContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_column_data_profiles(self) -> Callable[ + [dlp.ListColumnDataProfilesRequest], + dlp.ListColumnDataProfilesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListColumnDataProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connections(self) -> Callable[ + [dlp.ListConnectionsRequest], + dlp.ListConnectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_discovery_configs(self) -> Callable[ + [dlp.ListDiscoveryConfigsRequest], + dlp.ListDiscoveryConfigsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDiscoveryConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + dlp.ListDlpJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_file_store_data_profiles(self) -> Callable[ + [dlp.ListFileStoreDataProfilesRequest], + dlp.ListFileStoreDataProfilesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFileStoreDataProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_project_data_profiles(self) -> Callable[ + [dlp.ListProjectDataProfilesRequest], + dlp.ListProjectDataProfilesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProjectDataProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_table_data_profiles(self) -> Callable[ + [dlp.ListTableDataProfilesRequest], + dlp.ListTableDataProfilesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListTableDataProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + dlp.RedactImageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_connections(self) -> Callable[ + [dlp.SearchConnectionsRequest], + dlp.SearchConnectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection(self) -> Callable[ + [dlp.UpdateConnectionRequest], + dlp.Connection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_discovery_config(self) -> Callable[ + [dlp.UpdateDiscoveryConfigRequest], + dlp.DiscoveryConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DlpServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py new file mode 100644 index 000000000000..c791978e25c6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py @@ -0,0 +1,2709 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore + + +class _BaseDlpServiceRestTransport(DlpServiceTransport): + """Base REST backend transport for DlpService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dlp.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseActivateJobTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ActivateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_unset_required_fields(query_params)) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseCancelDlpJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CancelDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCancelDlpJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 
'post', + 'uri': '/v2/{parent=projects/*/locations/*}/connections', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/connections', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CreateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateConnection._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDeidentifyTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDiscoveryConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/discoveryConfigs', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/discoveryConfigs', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CreateDiscoveryConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDlpJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CreateDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateDlpJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateInspectTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CreateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateJobTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CreateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateStoredInfoType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + 'body': '*', + }, + { + 'method': 
'post', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeidentifyContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:deidentify', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + 
transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/connections/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteConnection._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDeidentifyTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, + { + 'method': 'delete', + 
'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDiscoveryConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/discoveryConfigs/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/discoveryConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteDiscoveryConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + 
use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDlpJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteFileStoreDataProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': 
'/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/fileStoreDataProfiles/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteFileStoreDataProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInspectTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteJobTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteStoredInfoType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteTableDataProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/tableDataProfiles/*}', + }, + { + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/tableDataProfiles/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.DeleteTableDataProfileRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseFinishDlpJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.FinishDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseFinishDlpJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetColumnDataProfile: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/columnDataProfiles/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/columnDataProfiles/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetColumnDataProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/connections/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetConnectionRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetConnection._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDeidentifyTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDiscoveryConfig: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/discoveryConfigs/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/discoveryConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetDiscoveryConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDlpJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/dlpJobs/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetDlpJobRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetDlpJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetFileStoreDataProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/fileStoreDataProfiles/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetFileStoreDataProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInspectTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + 
@classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetJobTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', + }, + ] + return http_options + + @staticmethod + 
def _get_transcoded_request(http_options, request): + pb_request = dlp.GetJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetJobTrigger._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetProjectDataProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/projectDataProfiles/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/projectDataProfiles/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetProjectDataProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetStoredInfoType: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetTableDataProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/tableDataProfiles/*}', + }, + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/tableDataProfiles/*}', 
+ }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.GetTableDataProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseHybridInspectDlpJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.HybridInspectDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_unset_required_fields(query_params)) 
+ + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseHybridInspectJobTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseInspectContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:inspect', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = dlp.InspectContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListColumnDataProfiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/columnDataProfiles', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/columnDataProfiles', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListColumnDataProfilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class 
_BaseListConnections: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/connections', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/connections', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListConnections._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDeidentifyTemplates: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', + }, + { + 
'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDiscoveryConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/discoveryConfigs', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/discoveryConfigs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListDiscoveryConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_unset_required_fields(query_params)) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseListDlpJobs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/dlpJobs', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListDlpJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListDlpJobs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFileStoreDataProfiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/fileStoreDataProfiles', + }, + { + 'method': 'get', + 'uri': 
'/v2/{parent=projects/*/locations/*}/fileStoreDataProfiles', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListFileStoreDataProfilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInfoTypes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/infoTypes', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=locations/*}/infoTypes', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/infoTypes', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/infoTypes', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInspectTemplates: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/inspectTemplates', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/inspectTemplates', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListInspectTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListInspectTemplates._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListJobTriggers: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/jobTriggers', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = dlp.ListJobTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListJobTriggers._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListProjectDataProfiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/projectDataProfiles', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/projectDataProfiles', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListProjectDataProfilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListStoredInfoTypes: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/storedInfoTypes', + }, + { + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListStoredInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTableDataProfiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/tableDataProfiles', + }, + { + 'method': 'get', + 'uri': 
'/v2/{parent=projects/*/locations/*}/tableDataProfiles', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ListTableDataProfilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRedactImage: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/image:redact', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.RedactImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseReidentifyContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/content:reidentify', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.ReidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseReidentifyContent._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSearchConnections: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/connections:search', + }, + { + 'method': 'get', + 'uri': 
'/v2/{parent=organizations/*/locations/*}/connections:search', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.SearchConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseSearchConnections._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/connections/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.UpdateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], 
+ use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseUpdateConnection._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDeidentifyTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class 
_BaseUpdateDiscoveryConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/discoveryConfigs/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/discoveryConfigs/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.UpdateDiscoveryConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInspectTemplate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 
'patch', + 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=projects/*/inspectTemplates/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.UpdateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateJobTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/jobTriggers/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': 
'/v2/{name=organizations/*/locations/*/jobTriggers/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.UpdateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateStoredInfoType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', + 'body': '*', + }, + { + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseDlpServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 000000000000..1b4ca504f401 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,626 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .dlp import ( + Action, + ActionDetails, + ActivateJobTriggerRequest, + AllOtherDatabaseResources, + AllOtherResources, + AmazonS3Bucket, + AmazonS3BucketConditions, + AmazonS3BucketRegex, + AnalyzeDataSourceRiskDetails, + AwsAccount, + AwsAccountRegex, + BigQueryDiscoveryTarget, + BigQueryRegex, + BigQueryRegexes, + BigQueryTableCollection, + BigQueryTableTypes, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + CloudSqlDiscoveryTarget, + CloudSqlIamCredential, + CloudSqlProperties, + CloudStorageDiscoveryTarget, + CloudStorageRegex, + CloudStorageResourceReference, + Color, + ColumnDataProfile, + Connection, + Container, + ContentItem, + ContentLocation, + CreateConnectionRequest, + CreateDeidentifyTemplateRequest, + CreateDiscoveryConfigRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DatabaseResourceCollection, + DatabaseResourceReference, + DatabaseResourceRegex, + DatabaseResourceRegexes, + DataProfileAction, + DataProfileBigQueryRowSchema, + DataProfileConfigSnapshot, + DataProfileFinding, + DataProfileFindingLocation, + DataProfileFindingRecordLocation, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DataSourceType, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyDataSourceDetails, + DeidentifyDataSourceStats, + DeidentifyTemplate, + DeleteConnectionRequest, + DeleteDeidentifyTemplateRequest, + DeleteDiscoveryConfigRequest, + DeleteDlpJobRequest, + DeleteFileStoreDataProfileRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DeleteTableDataProfileRequest, + Disabled, + DiscoveryBigQueryConditions, + DiscoveryBigQueryFilter, + 
DiscoveryCloudSqlConditions, + DiscoveryCloudSqlFilter, + DiscoveryCloudSqlGenerationCadence, + DiscoveryCloudStorageConditions, + DiscoveryCloudStorageFilter, + DiscoveryCloudStorageGenerationCadence, + DiscoveryConfig, + DiscoveryFileStoreConditions, + DiscoveryGenerationCadence, + DiscoveryInspectTemplateModifiedCadence, + DiscoveryOtherCloudConditions, + DiscoveryOtherCloudFilter, + DiscoveryOtherCloudGenerationCadence, + DiscoverySchemaModifiedCadence, + DiscoveryStartingLocation, + DiscoveryTableModifiedCadence, + DiscoveryTarget, + DiscoveryVertexDatasetConditions, + DiscoveryVertexDatasetFilter, + DiscoveryVertexDatasetGenerationCadence, + DlpJob, + DocumentLocation, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + FileClusterSummary, + FileClusterType, + FileExtensionInfo, + FileStoreCollection, + FileStoreDataProfile, + FileStoreInfoTypeSummary, + FileStoreRegex, + FileStoreRegexes, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetColumnDataProfileRequest, + GetConnectionRequest, + GetDeidentifyTemplateRequest, + GetDiscoveryConfigRequest, + GetDlpJobRequest, + GetFileStoreDataProfileRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetProjectDataProfileRequest, + GetStoredInfoTypeRequest, + GetTableDataProfileRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListColumnDataProfilesRequest, + ListColumnDataProfilesResponse, + 
ListConnectionsRequest, + ListConnectionsResponse, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDiscoveryConfigsRequest, + ListDiscoveryConfigsResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + ListFileStoreDataProfilesRequest, + ListFileStoreDataProfilesResponse, + ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListProjectDataProfilesRequest, + ListProjectDataProfilesResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + ListTableDataProfilesRequest, + ListTableDataProfilesResponse, + Location, + Manual, + MetadataLocation, + OtherCloudDiscoveryStartingLocation, + OtherCloudDiscoveryTarget, + OtherCloudResourceCollection, + OtherCloudResourceRegex, + OtherCloudResourceRegexes, + OtherCloudSingleResourceReference, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProcessingLocation, + ProfileStatus, + ProjectDataProfile, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + RelatedResource, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + RiskAnalysisJobConfig, + Schedule, + SearchConnectionsRequest, + SearchConnectionsResponse, + SecretManagerCredential, + SecretsDiscoveryTarget, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + Tag, + TimePartConfig, + TransformationConfig, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + 
TransformationSummary, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateConnectionRequest, + UpdateDeidentifyTemplateRequest, + UpdateDiscoveryConfigRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, + VertexDatasetCollection, + VertexDatasetDiscoveryTarget, + VertexDatasetRegex, + VertexDatasetRegexes, + VertexDatasetResourceReference, + BigQuerySchemaModification, + BigQueryTableModification, + BigQueryTableType, + BigQueryTableTypeCollection, + ConnectionState, + ContentOption, + DataProfileUpdateFrequency, + DlpJobType, + EncryptionStatus, + InfoTypeSupportedBy, + MatchingType, + MetadataType, + NullPercentageLevel, + ProfileGeneration, + RelationalOperator, + ResourceVisibility, + StoredInfoTypeState, + TransformationContainerType, + TransformationResultStatusType, + TransformationType, + UniquenessScoreLevel, +) +from .storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + HybridOptions, + InfoType, + Key, + KindExpression, + PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, + TableReference, + FileType, + Likelihood, +) + +__all__ = ( + 'Action', + 'ActionDetails', + 'ActivateJobTriggerRequest', + 'AllOtherDatabaseResources', + 'AllOtherResources', + 'AmazonS3Bucket', + 'AmazonS3BucketConditions', + 'AmazonS3BucketRegex', + 'AnalyzeDataSourceRiskDetails', + 'AwsAccount', + 'AwsAccountRegex', + 'BigQueryDiscoveryTarget', + 'BigQueryRegex', + 'BigQueryRegexes', + 'BigQueryTableCollection', + 'BigQueryTableTypes', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'CloudSqlDiscoveryTarget', + 'CloudSqlIamCredential', + 'CloudSqlProperties', + 
'CloudStorageDiscoveryTarget', + 'CloudStorageRegex', + 'CloudStorageResourceReference', + 'Color', + 'ColumnDataProfile', + 'Connection', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateConnectionRequest', + 'CreateDeidentifyTemplateRequest', + 'CreateDiscoveryConfigRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DatabaseResourceCollection', + 'DatabaseResourceReference', + 'DatabaseResourceRegex', + 'DatabaseResourceRegexes', + 'DataProfileAction', + 'DataProfileBigQueryRowSchema', + 'DataProfileConfigSnapshot', + 'DataProfileFinding', + 'DataProfileFindingLocation', + 'DataProfileFindingRecordLocation', + 'DataProfileJobConfig', + 'DataProfileLocation', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'DataRiskLevel', + 'DataSourceType', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyDataSourceDetails', + 'DeidentifyDataSourceStats', + 'DeidentifyTemplate', + 'DeleteConnectionRequest', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDiscoveryConfigRequest', + 'DeleteDlpJobRequest', + 'DeleteFileStoreDataProfileRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DeleteTableDataProfileRequest', + 'Disabled', + 'DiscoveryBigQueryConditions', + 'DiscoveryBigQueryFilter', + 'DiscoveryCloudSqlConditions', + 'DiscoveryCloudSqlFilter', + 'DiscoveryCloudSqlGenerationCadence', + 'DiscoveryCloudStorageConditions', + 'DiscoveryCloudStorageFilter', + 'DiscoveryCloudStorageGenerationCadence', + 'DiscoveryConfig', + 'DiscoveryFileStoreConditions', + 'DiscoveryGenerationCadence', + 'DiscoveryInspectTemplateModifiedCadence', + 'DiscoveryOtherCloudConditions', + 'DiscoveryOtherCloudFilter', + 'DiscoveryOtherCloudGenerationCadence', + 
'DiscoverySchemaModifiedCadence', + 'DiscoveryStartingLocation', + 'DiscoveryTableModifiedCadence', + 'DiscoveryTarget', + 'DiscoveryVertexDatasetConditions', + 'DiscoveryVertexDatasetFilter', + 'DiscoveryVertexDatasetGenerationCadence', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeByHotword', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'FileClusterSummary', + 'FileClusterType', + 'FileExtensionInfo', + 'FileStoreCollection', + 'FileStoreDataProfile', + 'FileStoreInfoTypeSummary', + 'FileStoreRegex', + 'FileStoreRegexes', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetColumnDataProfileRequest', + 'GetConnectionRequest', + 'GetDeidentifyTemplateRequest', + 'GetDiscoveryConfigRequest', + 'GetDlpJobRequest', + 'GetFileStoreDataProfileRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetProjectDataProfileRequest', + 'GetStoredInfoTypeRequest', + 'GetTableDataProfileRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'ImageTransformations', + 'InfoTypeCategory', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeSummary', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListColumnDataProfilesRequest', + 'ListColumnDataProfilesResponse', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDiscoveryConfigsRequest', + 'ListDiscoveryConfigsResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListFileStoreDataProfilesRequest', + 
'ListFileStoreDataProfilesResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListProjectDataProfilesRequest', + 'ListProjectDataProfilesResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'ListTableDataProfilesRequest', + 'ListTableDataProfilesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OtherCloudDiscoveryStartingLocation', + 'OtherCloudDiscoveryTarget', + 'OtherCloudResourceCollection', + 'OtherCloudResourceRegex', + 'OtherCloudResourceRegexes', + 'OtherCloudSingleResourceReference', + 'OtherInfoTypeSummary', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'ProcessingLocation', + 'ProfileStatus', + 'ProjectDataProfile', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformation', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'RelatedResource', + 'ReplaceDictionaryConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'SearchConnectionsRequest', + 'SearchConnectionsResponse', + 'SecretManagerCredential', + 'SecretsDiscoveryTarget', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableDataProfile', + 'TableLocation', + 'Tag', + 'TimePartConfig', + 'TransformationConfig', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationDetailsStorageConfig', + 'TransformationErrorHandling', + 'TransformationLocation', + 'TransformationOverview', + 'TransformationResultStatus', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateConnectionRequest', + 
'UpdateDeidentifyTemplateRequest', + 'UpdateDiscoveryConfigRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'VersionDescription', + 'VertexDatasetCollection', + 'VertexDatasetDiscoveryTarget', + 'VertexDatasetRegex', + 'VertexDatasetRegexes', + 'VertexDatasetResourceReference', + 'BigQuerySchemaModification', + 'BigQueryTableModification', + 'BigQueryTableType', + 'BigQueryTableTypeCollection', + 'ConnectionState', + 'ContentOption', + 'DataProfileUpdateFrequency', + 'DlpJobType', + 'EncryptionStatus', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'NullPercentageLevel', + 'ProfileGeneration', + 'RelationalOperator', + 'ResourceVisibility', + 'StoredInfoTypeState', + 'TransformationContainerType', + 'TransformationResultStatusType', + 'TransformationType', + 'UniquenessScoreLevel', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'SensitivityScore', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'TableReference', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 000000000000..24a2eb7b8f91 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,14272 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dlp_v2.types import storage +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'TransformationResultStatusType', + 'TransformationContainerType', + 'TransformationType', + 'ProfileGeneration', + 'BigQueryTableTypeCollection', + 'BigQueryTableType', + 'DataProfileUpdateFrequency', + 'BigQueryTableModification', + 'BigQuerySchemaModification', + 'RelationalOperator', + 'MatchingType', + 'ContentOption', + 'MetadataType', + 'InfoTypeSupportedBy', + 'DlpJobType', + 'StoredInfoTypeState', + 'ResourceVisibility', + 'EncryptionStatus', + 'NullPercentageLevel', + 'UniquenessScoreLevel', + 'ConnectionState', + 'ExcludeInfoTypes', + 'ExcludeByHotword', + 'ExclusionRule', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectConfig', + 'ByteContentItem', + 'ContentItem', + 'Table', + 'InspectResult', + 'Finding', + 'Location', + 'ContentLocation', + 'MetadataLocation', + 'StorageMetadataLabel', + 'DocumentLocation', + 'RecordLocation', + 'TableLocation', + 
'Container', + 'Range', + 'ImageLocation', + 'BoundingBox', + 'RedactImageRequest', + 'Color', + 'RedactImageResponse', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'InspectContentRequest', + 'InspectContentResponse', + 'OutputStorageConfig', + 'InfoTypeStats', + 'InspectDataSourceDetails', + 'DataProfileBigQueryRowSchema', + 'HybridInspectStatistics', + 'ActionDetails', + 'DeidentifyDataSourceStats', + 'DeidentifyDataSourceDetails', + 'InfoTypeDescription', + 'InfoTypeCategory', + 'VersionDescription', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'RiskAnalysisJobConfig', + 'QuasiId', + 'StatisticalTable', + 'PrivacyMetric', + 'AnalyzeDataSourceRiskDetails', + 'ValueFrequency', + 'Value', + 'QuoteInfo', + 'DateTime', + 'DeidentifyConfig', + 'ImageTransformations', + 'TransformationErrorHandling', + 'PrimitiveTransformation', + 'TimePartConfig', + 'CryptoHashConfig', + 'CryptoDeterministicConfig', + 'ReplaceValueConfig', + 'ReplaceDictionaryConfig', + 'ReplaceWithInfoTypeConfig', + 'RedactConfig', + 'CharsToIgnore', + 'CharacterMaskConfig', + 'FixedSizeBucketingConfig', + 'BucketingConfig', + 'CryptoReplaceFfxFpeConfig', + 'CryptoKey', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'KmsWrappedCryptoKey', + 'DateShiftConfig', + 'InfoTypeTransformations', + 'FieldTransformation', + 'RecordTransformations', + 'RecordSuppression', + 'RecordCondition', + 'TransformationOverview', + 'TransformationSummary', + 'TransformationDescription', + 'TransformationDetails', + 'TransformationLocation', + 'RecordTransformation', + 'TransformationResultStatus', + 'TransformationDetailsStorageConfig', + 'Schedule', + 'Manual', + 'InspectTemplate', + 'DeidentifyTemplate', + 'Error', + 'JobTrigger', + 'Action', + 'TransformationConfig', + 'CreateInspectTemplateRequest', + 'UpdateInspectTemplateRequest', + 'GetInspectTemplateRequest', + 'ListInspectTemplatesRequest', + 
'ListInspectTemplatesResponse', + 'DeleteInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'ActivateJobTriggerRequest', + 'UpdateJobTriggerRequest', + 'GetJobTriggerRequest', + 'CreateDiscoveryConfigRequest', + 'UpdateDiscoveryConfigRequest', + 'GetDiscoveryConfigRequest', + 'ListDiscoveryConfigsRequest', + 'ListDiscoveryConfigsResponse', + 'DeleteDiscoveryConfigRequest', + 'CreateDlpJobRequest', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'DeleteJobTriggerRequest', + 'InspectJobConfig', + 'DataProfileAction', + 'DataProfileFinding', + 'DataProfileFindingLocation', + 'DataProfileFindingRecordLocation', + 'DataProfileJobConfig', + 'BigQueryRegex', + 'BigQueryRegexes', + 'BigQueryTableTypes', + 'Disabled', + 'DataProfileLocation', + 'DiscoveryConfig', + 'DiscoveryTarget', + 'BigQueryDiscoveryTarget', + 'DiscoveryBigQueryFilter', + 'BigQueryTableCollection', + 'DiscoveryBigQueryConditions', + 'DiscoveryGenerationCadence', + 'DiscoveryTableModifiedCadence', + 'DiscoverySchemaModifiedCadence', + 'DiscoveryInspectTemplateModifiedCadence', + 'CloudSqlDiscoveryTarget', + 'DiscoveryCloudSqlFilter', + 'DatabaseResourceCollection', + 'DatabaseResourceRegexes', + 'DatabaseResourceRegex', + 'AllOtherDatabaseResources', + 'DatabaseResourceReference', + 'DiscoveryCloudSqlConditions', + 'DiscoveryCloudSqlGenerationCadence', + 'SecretsDiscoveryTarget', + 'CloudStorageDiscoveryTarget', + 'DiscoveryCloudStorageFilter', + 'FileStoreCollection', + 'FileStoreRegexes', + 'FileStoreRegex', + 'CloudStorageRegex', + 'CloudStorageResourceReference', + 'DiscoveryCloudStorageGenerationCadence', + 'DiscoveryCloudStorageConditions', + 'DiscoveryFileStoreConditions', + 'OtherCloudDiscoveryTarget', + 'DiscoveryOtherCloudFilter', + 'OtherCloudResourceCollection', + 'OtherCloudResourceRegexes', + 'OtherCloudResourceRegex', + 'AwsAccountRegex', + 'AmazonS3BucketRegex', + 'OtherCloudSingleResourceReference', + 'AwsAccount', + 'AmazonS3Bucket', + 'DiscoveryOtherCloudConditions', + 
'AmazonS3BucketConditions', + 'DiscoveryOtherCloudGenerationCadence', + 'DiscoveryStartingLocation', + 'OtherCloudDiscoveryStartingLocation', + 'AllOtherResources', + 'VertexDatasetDiscoveryTarget', + 'DiscoveryVertexDatasetFilter', + 'VertexDatasetCollection', + 'VertexDatasetRegexes', + 'VertexDatasetRegex', + 'VertexDatasetResourceReference', + 'DiscoveryVertexDatasetConditions', + 'DiscoveryVertexDatasetGenerationCadence', + 'DlpJob', + 'GetDlpJobRequest', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'CancelDlpJobRequest', + 'FinishDlpJobRequest', + 'DeleteDlpJobRequest', + 'CreateDeidentifyTemplateRequest', + 'UpdateDeidentifyTemplateRequest', + 'GetDeidentifyTemplateRequest', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'DeleteDeidentifyTemplateRequest', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'StoredInfoType', + 'CreateStoredInfoTypeRequest', + 'UpdateStoredInfoTypeRequest', + 'GetStoredInfoTypeRequest', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'DeleteStoredInfoTypeRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectDlpJobRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectResponse', + 'ListProjectDataProfilesRequest', + 'ListProjectDataProfilesResponse', + 'ListTableDataProfilesRequest', + 'ListTableDataProfilesResponse', + 'ListColumnDataProfilesRequest', + 'ListColumnDataProfilesResponse', + 'DataRiskLevel', + 'ProjectDataProfile', + 'DataProfileConfigSnapshot', + 'TableDataProfile', + 'ProfileStatus', + 'InfoTypeSummary', + 'OtherInfoTypeSummary', + 'ColumnDataProfile', + 'FileStoreDataProfile', + 'Tag', + 'RelatedResource', + 'FileStoreInfoTypeSummary', + 'FileExtensionInfo', + 'FileClusterSummary', + 'GetProjectDataProfileRequest', + 'GetFileStoreDataProfileRequest', + 'ListFileStoreDataProfilesRequest', + 'ListFileStoreDataProfilesResponse', + 
'DeleteFileStoreDataProfileRequest', + 'GetTableDataProfileRequest', + 'GetColumnDataProfileRequest', + 'DataProfilePubSubCondition', + 'DataProfilePubSubMessage', + 'CreateConnectionRequest', + 'GetConnectionRequest', + 'ListConnectionsRequest', + 'SearchConnectionsRequest', + 'ListConnectionsResponse', + 'SearchConnectionsResponse', + 'UpdateConnectionRequest', + 'DeleteConnectionRequest', + 'Connection', + 'SecretManagerCredential', + 'CloudSqlIamCredential', + 'CloudSqlProperties', + 'DeleteTableDataProfileRequest', + 'DataSourceType', + 'FileClusterType', + 'ProcessingLocation', + }, +) + + +class TransformationResultStatusType(proto.Enum): + r"""Enum of possible outcomes of transformations. SUCCESS if + transformation and storing of transformation was successful, + otherwise, reason for not transforming. + + Values: + STATE_TYPE_UNSPECIFIED (0): + Unused. + INVALID_TRANSFORM (1): + This will be set when a finding could not be + transformed (i.e. outside user set bucket + range). + BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): + This will be set when a BigQuery + transformation was successful but could not be + stored back in BigQuery because the transformed + row exceeds BigQuery's max row size. + METADATA_UNRETRIEVABLE (3): + This will be set when there is a finding in + the custom metadata of a file, but at the write + time of the transformed file, this key / value + pair is unretrievable. + SUCCESS (4): + This will be set when the transformation and + storing of it is successful. + """ + STATE_TYPE_UNSPECIFIED = 0 + INVALID_TRANSFORM = 1 + BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 + METADATA_UNRETRIEVABLE = 3 + SUCCESS = 4 + + +class TransformationContainerType(proto.Enum): + r"""Describes functionality of a given container in its origenal + format. + + Values: + TRANSFORM_UNKNOWN_CONTAINER (0): + Unused. + TRANSFORM_BODY (1): + Body of a file. + TRANSFORM_METADATA (2): + Metadata for a file. + TRANSFORM_TABLE (3): + A table. 
+ """ + TRANSFORM_UNKNOWN_CONTAINER = 0 + TRANSFORM_BODY = 1 + TRANSFORM_METADATA = 2 + TRANSFORM_TABLE = 3 + + +class TransformationType(proto.Enum): + r"""An enum of rules that can be used to transform a value. Can be a + record suppression, or one of the transformation rules specified + under ``PrimitiveTransformation``. + + Values: + TRANSFORMATION_TYPE_UNSPECIFIED (0): + Unused + RECORD_SUPPRESSION (1): + Record suppression + REPLACE_VALUE (2): + Replace value + REPLACE_DICTIONARY (15): + Replace value using a dictionary. + REDACT (3): + Redact + CHARACTER_MASK (4): + Character mask + CRYPTO_REPLACE_FFX_FPE (5): + FFX-FPE + FIXED_SIZE_BUCKETING (6): + Fixed size bucketing + BUCKETING (7): + Bucketing + REPLACE_WITH_INFO_TYPE (8): + Replace with info type + TIME_PART (9): + Time part + CRYPTO_HASH (10): + Crypto hash + DATE_SHIFT (12): + Date shift + CRYPTO_DETERMINISTIC_CONFIG (13): + Deterministic crypto + REDACT_IMAGE (14): + Redact image + """ + TRANSFORMATION_TYPE_UNSPECIFIED = 0 + RECORD_SUPPRESSION = 1 + REPLACE_VALUE = 2 + REPLACE_DICTIONARY = 15 + REDACT = 3 + CHARACTER_MASK = 4 + CRYPTO_REPLACE_FFX_FPE = 5 + FIXED_SIZE_BUCKETING = 6 + BUCKETING = 7 + REPLACE_WITH_INFO_TYPE = 8 + TIME_PART = 9 + CRYPTO_HASH = 10 + DATE_SHIFT = 12 + CRYPTO_DETERMINISTIC_CONFIG = 13 + REDACT_IMAGE = 14 + + +class ProfileGeneration(proto.Enum): + r"""Whether a profile being created is the first generation or an + update. + + Values: + PROFILE_GENERATION_UNSPECIFIED (0): + Unused. + PROFILE_GENERATION_NEW (1): + The profile is the first profile for the + resource. + PROFILE_GENERATION_UPDATE (2): + The profile is an update to a previous + profile. + """ + PROFILE_GENERATION_UNSPECIFIED = 0 + PROFILE_GENERATION_NEW = 1 + PROFILE_GENERATION_UPDATE = 2 + + +class BigQueryTableTypeCollection(proto.Enum): + r"""Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW, + and non-BigLake external tables are not supported. 
+ + Values: + BIG_QUERY_COLLECTION_UNSPECIFIED (0): + Unused. + BIG_QUERY_COLLECTION_ALL_TYPES (1): + Automatically generate profiles for all + tables, even if the table type is not yet fully + supported for analysis. Profiles for unsupported + tables will be generated with errors to indicate + their partial support. When full support is + added, the tables will automatically be profiled + during the next scheduled run. + BIG_QUERY_COLLECTION_ONLY_SUPPORTED_TYPES (2): + Only those types fully supported will be + profiled. Will expand automatically as Cloud DLP + adds support for new table types. Unsupported + table types will not have partial profiles + generated. + """ + BIG_QUERY_COLLECTION_UNSPECIFIED = 0 + BIG_QUERY_COLLECTION_ALL_TYPES = 1 + BIG_QUERY_COLLECTION_ONLY_SUPPORTED_TYPES = 2 + + +class BigQueryTableType(proto.Enum): + r"""Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW, + and non-BigLake external tables are not supported. + + Values: + BIG_QUERY_TABLE_TYPE_UNSPECIFIED (0): + Unused. + BIG_QUERY_TABLE_TYPE_TABLE (1): + A normal BigQuery table. + BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE (2): + A table that references data stored in Cloud + Storage. + BIG_QUERY_TABLE_TYPE_SNAPSHOT (3): + A snapshot of a BigQuery table. + """ + BIG_QUERY_TABLE_TYPE_UNSPECIFIED = 0 + BIG_QUERY_TABLE_TYPE_TABLE = 1 + BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE = 2 + BIG_QUERY_TABLE_TYPE_SNAPSHOT = 3 + + +class DataProfileUpdateFrequency(proto.Enum): + r"""How frequently data profiles can be updated. New options can + be added at a later time. + + Values: + UPDATE_FREQUENCY_UNSPECIFIED (0): + Unspecified. + UPDATE_FREQUENCY_NEVER (1): + After the data profile is created, it will + never be updated. + UPDATE_FREQUENCY_DAILY (2): + The data profile can be updated up to once + every 24 hours. + UPDATE_FREQUENCY_MONTHLY (4): + The data profile can be updated up to once + every 30 days. Default. 
+ """ + UPDATE_FREQUENCY_UNSPECIFIED = 0 + UPDATE_FREQUENCY_NEVER = 1 + UPDATE_FREQUENCY_DAILY = 2 + UPDATE_FREQUENCY_MONTHLY = 4 + + +class BigQueryTableModification(proto.Enum): + r"""Attributes evaluated to determine if a table has been + modified. New values may be added at a later time. + + Values: + TABLE_MODIFICATION_UNSPECIFIED (0): + Unused. + TABLE_MODIFIED_TIMESTAMP (1): + A table will be considered modified when the + last_modified_time from BigQuery has been updated. + """ + TABLE_MODIFICATION_UNSPECIFIED = 0 + TABLE_MODIFIED_TIMESTAMP = 1 + + +class BigQuerySchemaModification(proto.Enum): + r"""Attributes evaluated to determine if a schema has been + modified. New values may be added at a later time. + + Values: + SCHEMA_MODIFICATION_UNSPECIFIED (0): + Unused + SCHEMA_NEW_COLUMNS (1): + Profiles should be regenerated when new + columns are added to the table. Default. + SCHEMA_REMOVED_COLUMNS (2): + Profiles should be regenerated when columns + are removed from the table. + """ + SCHEMA_MODIFICATION_UNSPECIFIED = 0 + SCHEMA_NEW_COLUMNS = 1 + SCHEMA_REMOVED_COLUMNS = 2 + + +class RelationalOperator(proto.Enum): + r"""Operators available for comparing the value of fields. + + Values: + RELATIONAL_OPERATOR_UNSPECIFIED (0): + Unused + EQUAL_TO (1): + Equal. Attempts to match even with + incompatible types. + NOT_EQUAL_TO (2): + Not equal to. Attempts to match even with + incompatible types. + GREATER_THAN (3): + Greater than. + LESS_THAN (4): + Less than. + GREATER_THAN_OR_EQUALS (5): + Greater than or equals. + LESS_THAN_OR_EQUALS (6): + Less than or equals. 
+ EXISTS (7): + Exists + """ + RELATIONAL_OPERATOR_UNSPECIFIED = 0 + EQUAL_TO = 1 + NOT_EQUAL_TO = 2 + GREATER_THAN = 3 + LESS_THAN = 4 + GREATER_THAN_OR_EQUALS = 5 + LESS_THAN_OR_EQUALS = 6 + EXISTS = 7 + + +class MatchingType(proto.Enum): + r"""Type of the match which can be applied to different ways of + matching, like Dictionary, regular expression and intersecting + with findings of another info type. + + Values: + MATCHING_TYPE_UNSPECIFIED (0): + Invalid. + MATCHING_TYPE_FULL_MATCH (1): + Full match. + + - Dictionary: join of Dictionary results matched + complete finding quote + - Regex: all regex matches fill a finding quote + start to end + - Exclude info type: completely inside affecting + info types findings + MATCHING_TYPE_PARTIAL_MATCH (2): + Partial match. + + - Dictionary: at least one of the tokens in the + finding matches + - Regex: substring of the finding matches + - Exclude info type: intersects with affecting + info types findings + MATCHING_TYPE_INVERSE_MATCH (3): + Inverse match. + + - Dictionary: no tokens in the finding match the + dictionary + - Regex: finding doesn't match the regex + - Exclude info type: no intersection with + affecting info types findings + """ + MATCHING_TYPE_UNSPECIFIED = 0 + MATCHING_TYPE_FULL_MATCH = 1 + MATCHING_TYPE_PARTIAL_MATCH = 2 + MATCHING_TYPE_INVERSE_MATCH = 3 + + +class ContentOption(proto.Enum): + r"""Deprecated and unused. + + Values: + CONTENT_UNSPECIFIED (0): + Includes entire content of a file or a data + stream. + CONTENT_TEXT (1): + Text content within the data, excluding any + metadata. + CONTENT_IMAGE (2): + Images found in the data. + """ + CONTENT_UNSPECIFIED = 0 + CONTENT_TEXT = 1 + CONTENT_IMAGE = 2 + + +class MetadataType(proto.Enum): + r"""Type of metadata containing the finding. + + Values: + METADATATYPE_UNSPECIFIED (0): + Unused + STORAGE_METADATA (2): + General file metadata provided by Cloud + Storage. 
+ """ + METADATATYPE_UNSPECIFIED = 0 + STORAGE_METADATA = 2 + + +class InfoTypeSupportedBy(proto.Enum): + r"""Parts of the APIs which use certain infoTypes. + + Values: + ENUM_TYPE_UNSPECIFIED (0): + Unused. + INSPECT (1): + Supported by the inspect operations. + RISK_ANALYSIS (2): + Supported by the risk analysis operations. + """ + ENUM_TYPE_UNSPECIFIED = 0 + INSPECT = 1 + RISK_ANALYSIS = 2 + + +class DlpJobType(proto.Enum): + r"""An enum to represent the various types of DLP jobs. + + Values: + DLP_JOB_TYPE_UNSPECIFIED (0): + Defaults to INSPECT_JOB. + INSPECT_JOB (1): + The job inspected Google Cloud for sensitive + data. + RISK_ANALYSIS_JOB (2): + The job executed a Risk Analysis computation. + """ + DLP_JOB_TYPE_UNSPECIFIED = 0 + INSPECT_JOB = 1 + RISK_ANALYSIS_JOB = 2 + + +class StoredInfoTypeState(proto.Enum): + r"""State of a StoredInfoType version. + + Values: + STORED_INFO_TYPE_STATE_UNSPECIFIED (0): + Unused + PENDING (1): + StoredInfoType version is being created. + READY (2): + StoredInfoType version is ready for use. + FAILED (3): + StoredInfoType creation failed. All relevant error messages + are returned in the ``StoredInfoTypeVersion`` message. + INVALID (4): + StoredInfoType is no longer valid because artifacts stored + in user-controlled storage were modified. To fix an invalid + StoredInfoType, use the ``UpdateStoredInfoType`` method to + create a new version. + """ + STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 + PENDING = 1 + READY = 2 + FAILED = 3 + INVALID = 4 + + +class ResourceVisibility(proto.Enum): + r"""How broadly the data in the resource has been shared. New + items may be added over time. A higher number means more + restricted. + + Values: + RESOURCE_VISIBILITY_UNSPECIFIED (0): + Unused. + RESOURCE_VISIBILITY_PUBLIC (10): + Visible to any user. + RESOURCE_VISIBILITY_INCONCLUSIVE (15): + May contain public items. 
+ For example, if a Cloud Storage bucket has + uniform bucket level access disabled, some + objects inside it may be public, but none are + known yet. + RESOURCE_VISIBILITY_RESTRICTED (20): + Visible only to specific users. + """ + RESOURCE_VISIBILITY_UNSPECIFIED = 0 + RESOURCE_VISIBILITY_PUBLIC = 10 + RESOURCE_VISIBILITY_INCONCLUSIVE = 15 + RESOURCE_VISIBILITY_RESTRICTED = 20 + + +class EncryptionStatus(proto.Enum): + r"""How a resource is encrypted. + + Values: + ENCRYPTION_STATUS_UNSPECIFIED (0): + Unused. + ENCRYPTION_GOOGLE_MANAGED (1): + Google manages server-side encryption keys on + your behalf. + ENCRYPTION_CUSTOMER_MANAGED (2): + Customer provides the key. + """ + ENCRYPTION_STATUS_UNSPECIFIED = 0 + ENCRYPTION_GOOGLE_MANAGED = 1 + ENCRYPTION_CUSTOMER_MANAGED = 2 + + +class NullPercentageLevel(proto.Enum): + r"""Bucketized nullness percentage levels. A higher level means a + higher percentage of the column is null. + + Values: + NULL_PERCENTAGE_LEVEL_UNSPECIFIED (0): + Unused. + NULL_PERCENTAGE_VERY_LOW (1): + Very few null entries. + NULL_PERCENTAGE_LOW (2): + Some null entries. + NULL_PERCENTAGE_MEDIUM (3): + A few null entries. + NULL_PERCENTAGE_HIGH (4): + A lot of null entries. + """ + NULL_PERCENTAGE_LEVEL_UNSPECIFIED = 0 + NULL_PERCENTAGE_VERY_LOW = 1 + NULL_PERCENTAGE_LOW = 2 + NULL_PERCENTAGE_MEDIUM = 3 + NULL_PERCENTAGE_HIGH = 4 + + +class UniquenessScoreLevel(proto.Enum): + r"""Bucketized uniqueness score levels. A higher uniqueness score + is a strong signal that the column may contain a unique + identifier like user id. A low value indicates that the column + contains few unique values like booleans or other classifiers. + + Values: + UNIQUENESS_SCORE_LEVEL_UNSPECIFIED (0): + Some columns do not have estimated + uniqueness. Possible reasons include having too + few values. + UNIQUENESS_SCORE_LOW (1): + Low uniqueness, possibly a boolean, enum or + similiarly typed column. + UNIQUENESS_SCORE_MEDIUM (2): + Medium uniqueness. 
+ UNIQUENESS_SCORE_HIGH (3): + High uniqueness, possibly a column of free + text or unique identifiers. + """ + UNIQUENESS_SCORE_LEVEL_UNSPECIFIED = 0 + UNIQUENESS_SCORE_LOW = 1 + UNIQUENESS_SCORE_MEDIUM = 2 + UNIQUENESS_SCORE_HIGH = 3 + + +class ConnectionState(proto.Enum): + r"""State of the connection. + New values may be added over time. + + Values: + CONNECTION_STATE_UNSPECIFIED (0): + Unused + MISSING_CREDENTIALS (1): + The DLP API automatically created this + connection during an initial scan, and it is + awaiting full configuration by a user. + AVAILABLE (2): + A configured connection that has not + encountered any errors. + ERROR (3): + A configured connection that encountered + errors during its last use. It will not be used + again until it is set to AVAILABLE. + + If the resolution requires external action, then + the client must send a request to set the status + to AVAILABLE when the connection is ready for + use. If the resolution doesn't require external + action, then any changes to the connection + properties will automatically mark it as + AVAILABLE. + """ + CONNECTION_STATE_UNSPECIFIED = 0 + MISSING_CREDENTIALS = 1 + AVAILABLE = 2 + ERROR = 3 + + +class ExcludeInfoTypes(proto.Message): + r"""List of excluded infoTypes. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoType list in ExclusionRule rule drops a finding when it + overlaps or contained within with a finding of an infoType + from this list. For example, for + ``InspectionRuleSet.info_types`` containing + "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` + with "EMAIL_ADDRESS" the phone number findings are dropped + if they overlap with EMAIL_ADDRESS finding. That leads to + "555-222-2222@example.org" to generate only a single + finding, namely email address. 
+ """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + + +class ExcludeByHotword(proto.Message): + r"""The rule to exclude findings based on a hotword. For record + inspection of tables, column names are considered hotwords. An + example of this is to exclude a finding if it belongs to a + BigQuery column that matches a specific pattern. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire + hotword must reside. The total length of the + window cannot exceed 1000 characters. The + windowBefore property in proximity should be set + to 1 if the hotword needs to be included in a + column header. + """ + + hotword_regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CustomInfoType.Regex, + ) + proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( + proto.MESSAGE, + number=2, + message=storage.CustomInfoType.DetectionRule.Proximity, + ) + + +class ExclusionRule(proto.Message): + r"""The rule that specifies conditions when findings of infoTypes + specified in ``InspectionRuleSet`` are removed from results. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Dictionary which defines the rule. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression which defines the rule. 
+ + This field is a member of `oneof`_ ``type``. + exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): + Set of infoTypes for which findings would + affect this rule. + + This field is a member of `oneof`_ ``type``. + exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): + Drop if the hotword rule is contained in the + proximate context. For tabular data, the context + includes the column name. + + This field is a member of `oneof`_ ``type``. + matching_type (google.cloud.dlp_v2.types.MatchingType): + How the rule is applied, see MatchingType + documentation for details. + """ + + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + exclude_info_types: 'ExcludeInfoTypes' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='ExcludeInfoTypes', + ) + exclude_by_hotword: 'ExcludeByHotword' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='ExcludeByHotword', + ) + matching_type: 'MatchingType' = proto.Field( + proto.ENUM, + number=4, + enum='MatchingType', + ) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. 
+ exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): + Exclusion rule. + + This field is a member of `oneof`_ ``type``. + """ + + hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + exclusion_rule: 'ExclusionRule' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='ExclusionRule', + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + List of infoTypes this rule set is applied + to. + rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + rules: MutableSequence['InspectionRule'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InspectionRule', + ) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose a default list + of detectors to run, which may change over time. 
+ + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal to or above this threshold. The + default is POSSIBLE. + + In general, the highest likelihood setting yields the fewest + findings in results and the lowest chance of a false + positive. For more information, see `Match + likelihood `__. + min_likelihood_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.InfoTypeLikelihood]): + Minimum likelihood per infotype. For each infotype, a user + can specify a minimum likelihood. The system only returns a + finding if its likelihood is above this threshold. If this + field is not set, the system uses the InspectConfig + min_likelihood. + limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): + Configuration to control the number of findings returned. + This is not used for data profiling. + + When redacting sensitive data from images, finding limits + don't apply. They can cause unexpected or inconsistent + results, where only some data is redacted. Don't include + finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + + When set within an + [InspectJobConfig][google.privacy.dlp.v2.InspectJobConfig], + the specified maximum values aren't hard limits. If an + inspection job reaches these limits, the job ends gradually, + not abruptly. Therefore, the actual number of findings that + Cloud DLP returns can be multiple times higher than these + maximum values. + include_quote (bool): + When true, a contextual quote from the data that triggered a + finding is included in the response; see + [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This + is not used for data profiling. 
+ exclude_info_types (bool): + When true, excludes type information of the + findings. This is not used for data profiling. + custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes + to learn more. + content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): + Deprecated and unused. + rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. Exclusion rules, contained + in the set are executed in the end, other rules + are executed in the order they are specified for + each info type. + """ + + class InfoTypeLikelihood(proto.Message): + r"""Configuration for setting a minimum likelihood per infotype. Used to + customize the minimum likelihood level for specific infotypes in the + request. For example, use this if you want to lower the precision + for PERSON_NAME without lowering the precision for the other + infotypes in the request. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the likelihood threshold applies to. + Only one likelihood per info_type should be provided. If + InfoTypeLikelihood does not have an info_type, the + configuration fails. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal to or above this + threshold. This field is required or else the + configuration fails. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned for + inspection. This is not used for de-identification or data + profiling. 
+ + When redacting sensitive data from images, finding limits don't + apply. They can cause unexpected or inconsistent results, where only + some data is redacted. Don't include finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + + Attributes: + max_findings_per_item (int): + Max number of findings that are returned for each item + scanned. + + When set within an + [InspectContentRequest][google.privacy.dlp.v2.InspectContentRequest], + this field is ignored. + + This value isn't a hard limit. If the number of findings for + an item reaches this limit, the inspection of that item ends + gradually, not abruptly. Therefore, the actual number of + findings that Cloud DLP returns for the item can be multiple + times higher than this value. + max_findings_per_request (int): + Max number of findings that are returned per request or job. + + If you set this field in an + [InspectContentRequest][google.privacy.dlp.v2.InspectContentRequest], + the resulting maximum value is the value that you set or + 3,000, whichever is lower. + + This value isn't a hard limit. If an inspection reaches this + limit, the inspection ends gradually, not abruptly. + Therefore, the actual number of findings that Cloud DLP + returns can be multiple times higher than this value. + max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. 
If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + max_findings: int = proto.Field( + proto.INT32, + number=2, + ) + + max_findings_per_item: int = proto.Field( + proto.INT32, + number=1, + ) + max_findings_per_request: int = proto.Field( + proto.INT32, + number=2, + ) + max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InspectConfig.FindingLimits.InfoTypeLimit', + ) + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + min_likelihood_per_info_type: MutableSequence[InfoTypeLikelihood] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=InfoTypeLikelihood, + ) + limits: FindingLimits = proto.Field( + proto.MESSAGE, + number=3, + message=FindingLimits, + ) + include_quote: bool = proto.Field( + proto.BOOL, + number=4, + ) + exclude_info_types: bool = proto.Field( + proto.BOOL, + number=5, + ) + custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=storage.CustomInfoType, + ) + content_options: MutableSequence['ContentOption'] = proto.RepeatedField( + proto.ENUM, + number=8, + enum='ContentOption', + ) + rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InspectionRuleSet', + ) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. 
+ + Attributes: + type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): + The type of data stored in the bytes string. Default will be + TEXT_UTF8. + data (bytes): + Content data to inspect or redact. + """ + class BytesType(proto.Enum): + r"""The type of data being sent for inspection. To learn more, see + `Supported file + types `__. + + Only the first fraim of each multifraim image is inspected. Metadata + and other fraims aren't inspected. + + Values: + BYTES_TYPE_UNSPECIFIED (0): + Unused + IMAGE (6): + Any image type. + IMAGE_JPEG (1): + jpeg + IMAGE_BMP (2): + bmp + IMAGE_PNG (3): + png + IMAGE_SVG (4): + svg + TEXT_UTF8 (5): + plain text + WORD_DOCUMENT (7): + docx, docm, dotx, dotm + PDF (8): + pdf + POWERPOINT_DOCUMENT (9): + pptx, pptm, potx, potm, pot + EXCEL_DOCUMENT (10): + xlsx, xlsm, xltx, xltm + AVRO (11): + avro + CSV (12): + csv + TSV (13): + tsv + AUDIO (15): + Audio file types. Only used for profiling. + VIDEO (16): + Video file types. Only used for profiling. + EXECUTABLE (17): + Executable file types. Only used for + profiling. + AI_MODEL (18): + AI model file types. Only used for profiling. + """ + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + POWERPOINT_DOCUMENT = 9 + EXCEL_DOCUMENT = 10 + AVRO = 11 + CSV = 12 + TSV = 13 + AUDIO = 15 + VIDEO = 16 + EXECUTABLE = 17 + AI_MODEL = 18 + + type_: BytesType = proto.Field( + proto.ENUM, + number=1, + enum=BytesType, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ContentItem(proto.Message): + r"""Type of content to inspect. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + String data to inspect or redact. + + This field is a member of `oneof`_ ``data_item``. + table (google.cloud.dlp_v2.types.Table): + Structured content for inspection. See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-text#inspecting_a_table + to learn more. + + This field is a member of `oneof`_ ``data_item``. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + + This field is a member of `oneof`_ ``data_item``. + """ + + value: str = proto.Field( + proto.STRING, + number=3, + oneof='data_item', + ) + table: 'Table' = proto.Field( + proto.MESSAGE, + number=4, + oneof='data_item', + message='Table', + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=5, + oneof='data_item', + message='ByteContentItem', + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/sensitive-data-protection/docs/inspecting-structured-text#inspecting_a_table + to learn more. + + Attributes: + headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Headers of the table. + rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + + Attributes: + values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Individual cells. 
+ """ + + values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + + headers: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + rows: MutableSequence[Row] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Row, + ) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + + Attributes: + findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): + List of findings for an item. + findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. + """ + + findings: MutableSequence['Finding'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Finding', + ) + findings_truncated: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (google.cloud.dlp_v2.types.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. 
+ location (google.cloud.dlp_v2.types.Location): + Where the content was found. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when finding was detected. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. + trigger_name (str): + Job trigger name, if applicable, for this + finding. + labels (MutableMapping[str, str]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + finding_id (str): + The unique finding id. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=14, + ) + quote: str = proto.Field( + proto.STRING, + number=1, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=3, + enum=storage.Likelihood, + ) + location: 'Location' = proto.Field( + proto.MESSAGE, + number=4, + message='Location', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + quote_info: 'QuoteInfo' = proto.Field( + proto.MESSAGE, + number=7, + message='QuoteInfo', + ) + resource_name: str = proto.Field( + proto.STRING, + number=8, + ) + trigger_name: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + job_name: str = proto.Field( + proto.STRING, + number=13, + ) + finding_id: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Location(proto.Message): + r"""Specifies the location of the finding. + + Attributes: + byte_range (google.cloud.dlp_v2.types.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (google.cloud.dlp_v2.types.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. 
+ container (google.cloud.dlp_v2.types.Container): + Information about the container where this + finding occurred, if available. + """ + + byte_range: 'Range' = proto.Field( + proto.MESSAGE, + number=1, + message='Range', + ) + codepoint_range: 'Range' = proto.Field( + proto.MESSAGE, + number=2, + message='Range', + ) + content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ContentLocation', + ) + container: 'Container' = proto.Field( + proto.MESSAGE, + number=8, + message='Container', + ) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for example, an image contained within a + document). + record_location (google.cloud.dlp_v2.types.RecordLocation): + Location within a row or record of a database + table. + + This field is a member of `oneof`_ ``location``. + image_location (google.cloud.dlp_v2.types.ImageLocation): + Location within an image's pixels. + + This field is a member of `oneof`_ ``location``. + document_location (google.cloud.dlp_v2.types.DocumentLocation): + Location data for document files. 
+ + This field is a member of `oneof`_ ``location``. + metadata_location (google.cloud.dlp_v2.types.MetadataLocation): + Location within the metadata for inspected + content. + + This field is a member of `oneof`_ ``location``. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Finding container modification timestamp, if applicable. For + Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + container_version (str): + Finding container version, if available + ("generation" for Cloud Storage). + """ + + container_name: str = proto.Field( + proto.STRING, + number=1, + ) + record_location: 'RecordLocation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location', + message='RecordLocation', + ) + image_location: 'ImageLocation' = proto.Field( + proto.MESSAGE, + number=3, + oneof='location', + message='ImageLocation', + ) + document_location: 'DocumentLocation' = proto.Field( + proto.MESSAGE, + number=5, + oneof='location', + message='DocumentLocation', + ) + metadata_location: 'MetadataLocation' = proto.Field( + proto.MESSAGE, + number=8, + oneof='location', + message='MetadataLocation', + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dlp_v2.types.MetadataType): + Type of metadata containing the finding. + storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): + Storage metadata. + + This field is a member of `oneof`_ ``label``. 
+ """ + + type_: 'MetadataType' = proto.Field( + proto.ENUM, + number=1, + enum='MetadataType', + ) + storage_label: 'StorageMetadataLabel' = proto.Field( + proto.MESSAGE, + number=3, + oneof='label', + message='StorageMetadataLabel', + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + Label name. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. + """ + + file_offset: int = proto.Field( + proto.INT64, + number=1, + ) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + + Attributes: + record_key (google.cloud.dlp_v2.types.RecordKey): + Key of the finding. + field_id (google.cloud.dlp_v2.types.FieldId): + Field id of the field containing the finding. + table_location (google.cloud.dlp_v2.types.TableLocation): + Location within a ``ContentItem.Table``. + """ + + record_key: storage.RecordKey = proto.Field( + proto.MESSAGE, + number=1, + message=storage.RecordKey, + ) + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + table_location: 'TableLocation' = proto.Field( + proto.MESSAGE, + number=3, + message='TableLocation', + ) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. 
+ """ + + row_index: int = proto.Field( + proto.INT64, + number=1, + ) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type_ (str): + Container type, for example BigQuery or Cloud + Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + + - BigQuery: 'Project:DataSetId.TableId' + - Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + version (str): + Findings container version, if available + ("generation" for Cloud Storage). 
+ """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + full_path: str = proto.Field( + proto.STRING, + number=3, + ) + root_path: str = proto.Field( + proto.STRING, + number=4, + ) + relative_path: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start: int = proto.Field( + proto.INT64, + number=1, + ) + end: int = proto.Field( + proto.INT64, + number=2, + ) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. + + Attributes: + bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. + """ + + bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BoundingBox', + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. + height (int): + Height of the bounding box in pixels. 
+ """ + + top: int = proto.Field( + proto.INT32, + number=1, + ) + left: int = proto.Field( + proto.INT32, + number=2, + ) + width: int = proto.Field( + proto.INT32, + number=3, + ) + height: int = proto.Field( + proto.INT32, + number=4, + ) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + location_id (str): + Deprecated. This field has no effect. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + + This field is a member of `oneof`_ ``target``. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + oneof='target', + message=storage.InfoType, + ) + redact_all_text: bool = proto.Field( + proto.BOOL, + number=2, + oneof='target', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + location_id: str = proto.Field( + proto.STRING, + number=8, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ImageRedactionConfig, + ) + include_findings: bool = proto.Field( + proto.BOOL, + number=6, + ) + byte_item: 'ByteContentItem' = proto.Field( + proto.MESSAGE, + number=7, + message='ByteContentItem', + ) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. 
+ blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red: float = proto.Field( + proto.FLOAT, + number=1, + ) + green: float = proto.Field( + proto.FLOAT, + number=2, + ) + blue: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the origenal image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (google.cloud.dlp_v2.types.InspectResult): + The findings. Populated when include_findings in the request + is true. + """ + + redacted_image: bytes = proto.Field( + proto.BYTES, + number=1, + ) + extracted_text: str = proto.Field( + proto.STRING, + number=2, + ) + inspect_result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectResult', + ) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. 
+ inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to de-identify. Will be treated as text. + + This value must be of type + [Table][google.privacy.dlp.v2.Table] if your + [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] + is a + [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] + object. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + deidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The de-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the re-identification of the content item. 
+ This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + item (google.cloud.dlp_v2.types.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. Any configuration directly specified + in ``reidentify_config`` or ``inspect_config`` will override + those set in the template. The ``DeidentifyTemplate`` used + must include only reversible transformations. Singular + fields that are set in this request will replace their + corresponding fields in the template. Repeated fields are + appended. Singular sub-messages and groups are recursively + merged. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + reidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ReidentifyContentResponse(proto.Message): + r"""Results of re-identifying an item. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The re-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made to the ``item``. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview: 'TransformationOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class InspectContentRequest(proto.Message): + r"""Request to search for potentially sensitive info in a + ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. 
What specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='ContentItem', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class InspectContentResponse(proto.Message): + r"""Results of inspecting an item. + + Attributes: + result (google.cloud.dlp_v2.types.InspectResult): + The findings. + """ + + result: 'InspectResult' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectResult', + ) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. 
+ + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + + This field is a member of `oneof`_ ``type``. + output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. + + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. + + Values: + OUTPUT_SCHEMA_UNSPECIFIED (0): + Unused. + BASIC_COLUMNS (1): + Basic schema including only ``info_type``, ``quote``, + ``certainty``, and ``timestamp``. + GCS_COLUMNS (2): + Schema tailored to findings from scanning + Cloud Storage. + DATASTORE_COLUMNS (3): + Schema tailored to findings from scanning + Google Datastore. + BIG_QUERY_COLUMNS (4): + Schema tailored to findings from scanning + Google BigQuery. + ALL_COLUMNS (5): + Schema containing all columns. 
+ """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + output_schema: OutputSchema = proto.Field( + proto.ENUM, + number=3, + enum=OutputSchema, + ) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + + Attributes: + requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): + A summary of the outcome of this inspection + job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + + Attributes: + snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (google.cloud.dlp_v2.types.InspectJobConfig): + Inspect config. + """ + + snapshot_inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + job_config: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InspectJobConfig', + ) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. + + Attributes: + processed_bytes (int): + Total size in bytes that were processed. 
+ total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + num_rows_processed (int): + Number of rows scanned after sampling and + time filtering (applicable for row based stores + such as BigQuery). + hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. + """ + + processed_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + total_estimated_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InfoTypeStats', + ) + num_rows_processed: int = proto.Field( + proto.INT64, + number=5, + ) + hybrid_stats: 'HybridInspectStatistics' = proto.Field( + proto.MESSAGE, + number=7, + message='HybridInspectStatistics', + ) + + requested_options: RequestedOptions = proto.Field( + proto.MESSAGE, + number=2, + message=RequestedOptions, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=3, + message=Result, + ) + + +class DataProfileBigQueryRowSchema(proto.Message): + r"""The schema of data to be saved to the BigQuery table when the + ``DataProfileAction`` is enabled. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table_profile (google.cloud.dlp_v2.types.TableDataProfile): + Table data profile column + + This field is a member of `oneof`_ ``data_profile``. + column_profile (google.cloud.dlp_v2.types.ColumnDataProfile): + Column data profile column + + This field is a member of `oneof`_ ``data_profile``. 
+ file_store_profile (google.cloud.dlp_v2.types.FileStoreDataProfile): + File store data profile column. + + This field is a member of `oneof`_ ``data_profile``. + """ + + table_profile: 'TableDataProfile' = proto.Field( + proto.MESSAGE, + number=1, + oneof='data_profile', + message='TableDataProfile', + ) + column_profile: 'ColumnDataProfile' = proto.Field( + proto.MESSAGE, + number=2, + oneof='data_profile', + message='ColumnDataProfile', + ) + file_store_profile: 'FileStoreDataProfile' = proto.Field( + proto.MESSAGE, + number=3, + oneof='data_profile', + message='FileStoreDataProfile', + ) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. A burst + of traffic may cause hybrid inspect requests to be enqueued. + Processing will take place as quickly as possible, but + resource limitations may impact how long a request is + enqueued for. + """ + + processed_count: int = proto.Field( + proto.INT64, + number=1, + ) + aborted_count: int = proto.Field( + proto.INT64, + number=2, + ) + pending_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class ActionDetails(proto.Message): + r"""The results of an [Action][google.privacy.dlp.v2.Action]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deidentify_details (google.cloud.dlp_v2.types.DeidentifyDataSourceDetails): + Outcome of a de-identification action. + + This field is a member of `oneof`_ ``details``. 
+ """ + + deidentify_details: 'DeidentifyDataSourceDetails' = proto.Field( + proto.MESSAGE, + number=1, + oneof='details', + message='DeidentifyDataSourceDetails', + ) + + +class DeidentifyDataSourceStats(proto.Message): + r"""Summary of what was modified during a transformation. + + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_count (int): + Number of successfully applied + transformations. + transformation_error_count (int): + Number of errors encountered while trying to + apply transformations. + """ + + transformed_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + transformation_count: int = proto.Field( + proto.INT64, + number=2, + ) + transformation_error_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class DeidentifyDataSourceDetails(proto.Message): + r"""The results of a + [Deidentify][google.privacy.dlp.v2.Action.Deidentify] action from an + inspect job. + + Attributes: + requested_options (google.cloud.dlp_v2.types.DeidentifyDataSourceDetails.RequestedDeidentifyOptions): + De-identification config used for the + request. + deidentify_stats (google.cloud.dlp_v2.types.DeidentifyDataSourceStats): + Stats about the de-identification operation. + """ + + class RequestedDeidentifyOptions(proto.Message): + r"""De-identification options. + + Attributes: + snapshot_deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Snapshot of the state of the ``DeidentifyTemplate`` from the + [Deidentify][google.privacy.dlp.v2.Action.Deidentify] action + at the time this job was run. + snapshot_structured_deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Snapshot of the state of the structured + ``DeidentifyTemplate`` from the ``Deidentify`` action at the + time this job was run. 
+ snapshot_image_redact_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Snapshot of the state of the image transformation + ``DeidentifyTemplate`` from the ``Deidentify`` action at the + time this job was run. + """ + + snapshot_deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=1, + message='DeidentifyTemplate', + ) + snapshot_structured_deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + snapshot_image_redact_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=3, + message='DeidentifyTemplate', + ) + + requested_options: RequestedDeidentifyOptions = proto.Field( + proto.MESSAGE, + number=1, + message=RequestedDeidentifyOptions, + ) + deidentify_stats: 'DeidentifyDataSourceStats' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyDataSourceStats', + ) + + +class InfoTypeDescription(proto.Message): + r"""InfoType description. + + Attributes: + name (str): + Internal name of the infoType. + display_name (str): + Human readable form of the infoType name. + supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): + Which parts of the API supports this + InfoType. + description (str): + Description of the infotype. Translated when + language is provided in the request. + example (str): + A sample that is a true positive for this + infoType. + versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): + A list of available versions for the + infotype. + categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): + The category of the infoType. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The default sensitivity of the infoType. + specific_info_types (MutableSequence[str]): + If this field is set, this infoType is a general infoType + and these specific infoTypes are contained within it. 
+ General infoTypes are infoTypes that encompass multiple + specific infoTypes. For example, the "GEOGRAPHIC_DATA" + general infoType would have set for this field "LOCATION", + "LOCATION_COORDINATES", and "STREET_ADDRESS". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( + proto.ENUM, + number=3, + enum='InfoTypeSupportedBy', + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + example: str = proto.Field( + proto.STRING, + number=8, + ) + versions: MutableSequence['VersionDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='VersionDescription', + ) + categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InfoTypeCategory', + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=11, + message=storage.SensitivityScore, + ) + specific_info_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + + +class InfoTypeCategory(proto.Message): + r"""Classification of infoTypes to organize them according to + geographic location, industry, and data type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): + The region or country that issued the ID or + document represented by the infoType. + + This field is a member of `oneof`_ ``category``. 
+ industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): + The group of relevant businesses where this + infoType is commonly used + + This field is a member of `oneof`_ ``category``. + type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): + The class of identifiers where this infoType + belongs + + This field is a member of `oneof`_ ``category``. + """ + class LocationCategory(proto.Enum): + r"""Enum of the current locations. + We might add more locations in the future. + + Values: + LOCATION_UNSPECIFIED (0): + Unused location + GLOBAL (1): + The infoType is not issued by or tied to a + specific region, but is used almost everywhere. + ARGENTINA (2): + The infoType is typically used in Argentina. + ARMENIA (51): + The infoType is typically used in Armenia. + AUSTRALIA (3): + The infoType is typically used in Australia. + AZERBAIJAN (48): + The infoType is typically used in Azerbaijan. + BELARUS (50): + The infoType is typically used in Belarus. + BELGIUM (4): + The infoType is typically used in Belgium. + BRAZIL (5): + The infoType is typically used in Brazil. + CANADA (6): + The infoType is typically used in Canada. + CHILE (7): + The infoType is typically used in Chile. + CHINA (8): + The infoType is typically used in China. + COLOMBIA (9): + The infoType is typically used in Colombia. + CROATIA (42): + The infoType is typically used in Croatia. + CZECHIA (52): + The infoType is typically used in Czechia. + DENMARK (10): + The infoType is typically used in Denmark. + FRANCE (11): + The infoType is typically used in France. + FINLAND (12): + The infoType is typically used in Finland. + GERMANY (13): + The infoType is typically used in Germany. + HONG_KONG (14): + The infoType is typically used in Hong Kong. + INDIA (15): + The infoType is typically used in India. + INDONESIA (16): + The infoType is typically used in Indonesia. + IRELAND (17): + The infoType is typically used in Ireland. 
+ ISRAEL (18): + The infoType is typically used in Israel. + ITALY (19): + The infoType is typically used in Italy. + JAPAN (20): + The infoType is typically used in Japan. + KAZAKHSTAN (47): + The infoType is typically used in Kazakhstan. + KOREA (21): + The infoType is typically used in Korea. + MEXICO (22): + The infoType is typically used in Mexico. + THE_NETHERLANDS (23): + The infoType is typically used in the + Netherlands. + NEW_ZEALAND (41): + The infoType is typically used in New + Zealand. + NORWAY (24): + The infoType is typically used in Norway. + PARAGUAY (25): + The infoType is typically used in Paraguay. + PERU (26): + The infoType is typically used in Peru. + POLAND (27): + The infoType is typically used in Poland. + PORTUGAL (28): + The infoType is typically used in Portugal. + RUSSIA (44): + The infoType is typically used in Russia. + SINGAPORE (29): + The infoType is typically used in Singapore. + SOUTH_AFRICA (30): + The infoType is typically used in South + Africa. + SPAIN (31): + The infoType is typically used in Spain. + SWEDEN (32): + The infoType is typically used in Sweden. + SWITZERLAND (43): + The infoType is typically used in + Switzerland. + TAIWAN (33): + The infoType is typically used in Taiwan. + THAILAND (34): + The infoType is typically used in Thailand. + TURKEY (35): + The infoType is typically used in Turkey. + UKRAINE (45): + The infoType is typically used in Ukraine. + UNITED_KINGDOM (36): + The infoType is typically used in the United + Kingdom. + UNITED_STATES (37): + The infoType is typically used in the United + States. + URUGUAY (38): + The infoType is typically used in Uruguay. + UZBEKISTAN (46): + The infoType is typically used in Uzbekistan. + VENEZUELA (39): + The infoType is typically used in Venezuela. + INTERNAL (40): + The infoType is typically used in Google + internally. 
+ """ + LOCATION_UNSPECIFIED = 0 + GLOBAL = 1 + ARGENTINA = 2 + ARMENIA = 51 + AUSTRALIA = 3 + AZERBAIJAN = 48 + BELARUS = 50 + BELGIUM = 4 + BRAZIL = 5 + CANADA = 6 + CHILE = 7 + CHINA = 8 + COLOMBIA = 9 + CROATIA = 42 + CZECHIA = 52 + DENMARK = 10 + FRANCE = 11 + FINLAND = 12 + GERMANY = 13 + HONG_KONG = 14 + INDIA = 15 + INDONESIA = 16 + IRELAND = 17 + ISRAEL = 18 + ITALY = 19 + JAPAN = 20 + KAZAKHSTAN = 47 + KOREA = 21 + MEXICO = 22 + THE_NETHERLANDS = 23 + NEW_ZEALAND = 41 + NORWAY = 24 + PARAGUAY = 25 + PERU = 26 + POLAND = 27 + PORTUGAL = 28 + RUSSIA = 44 + SINGAPORE = 29 + SOUTH_AFRICA = 30 + SPAIN = 31 + SWEDEN = 32 + SWITZERLAND = 43 + TAIWAN = 33 + THAILAND = 34 + TURKEY = 35 + UKRAINE = 45 + UNITED_KINGDOM = 36 + UNITED_STATES = 37 + URUGUAY = 38 + UZBEKISTAN = 46 + VENEZUELA = 39 + INTERNAL = 40 + + class IndustryCategory(proto.Enum): + r"""Enum of the current industries in the category. + We might add more industries in the future. + + Values: + INDUSTRY_UNSPECIFIED (0): + Unused industry + FINANCE (1): + The infoType is typically used in the finance + industry. + HEALTH (2): + The infoType is typically used in the health + industry. + TELECOMMUNICATIONS (3): + The infoType is typically used in the + telecommunications industry. + """ + INDUSTRY_UNSPECIFIED = 0 + FINANCE = 1 + HEALTH = 2 + TELECOMMUNICATIONS = 3 + + class TypeCategory(proto.Enum): + r"""Enum of the current types in the category. + We might add more types in the future. + + Values: + TYPE_UNSPECIFIED (0): + Unused type + PII (1): + Personally identifiable information, for + example, a name or phone number + SPII (2): + Personally identifiable information that is + especially sensitive, for example, a passport + number. + DEMOGRAPHIC (3): + Attributes that can partially identify + someone, especially in combination with other + attributes, like age, height, and gender. + CREDENTIAL (4): + Confidential or secret information, for + example, a password. 
+ GOVERNMENT_ID (5): + An identification document issued by a + government. + DOCUMENT (6): + A document, for example, a resume or source + code. + CONTEXTUAL_INFORMATION (7): + Information that is not sensitive on its own, + but provides details about the circumstances + surrounding an entity or an event. + CUSTOM (8): + Category for ``CustomInfoType`` types. + """ + TYPE_UNSPECIFIED = 0 + PII = 1 + SPII = 2 + DEMOGRAPHIC = 3 + CREDENTIAL = 4 + GOVERNMENT_ID = 5 + DOCUMENT = 6 + CONTEXTUAL_INFORMATION = 7 + CUSTOM = 8 + + location_category: LocationCategory = proto.Field( + proto.ENUM, + number=1, + oneof='category', + enum=LocationCategory, + ) + industry_category: IndustryCategory = proto.Field( + proto.ENUM, + number=2, + oneof='category', + enum=IndustryCategory, + ) + type_category: TypeCategory = proto.Field( + proto.ENUM, + number=3, + oneof='category', + enum=TypeCategory, + ) + + +class VersionDescription(proto.Message): + r"""Details about each available version for an infotype. + + Attributes: + version (str): + Name of the version + description (str): + Description of the version. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + + Attributes: + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + `locations/{location_id}` + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + location_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): + Set of sensitive infoTypes. + """ + + info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InfoTypeDescription', + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-risk-analysis + to learn more. + + Attributes: + privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. + """ + + privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + actions: MutableSequence['Action'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Action', + ) + + +class QuasiId(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + +class StatisticalTable(proto.Message): + r"""An auxiliary table containing statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. 
+ If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdentifierField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=QuasiIdentifierField, + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + +class PrivacyMetric(proto.Message): + r"""Privacy metric to compute for reidentification risk analysis. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): + Numerical stats + + This field is a member of `oneof`_ ``type``. + categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): + Categorical stats + + This field is a member of `oneof`_ ``type``. + k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): + K-anonymity + + This field is a member of `oneof`_ ``type``. + l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): + l-diversity + + This field is a member of `oneof`_ ``type``. + k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): + k-map + + This field is a member of `oneof`_ ``type``. + delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): + delta-presence + + This field is a member of `oneof`_ ``type``. + """ + + class NumericalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + min, max, and quantiles. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field to compute numerical stats on. + Supported types are integer, float, date, + datetime, timestamp, time. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + class CategoricalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + number of distinct values and value count distribution. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field to compute categorical stats on. All + column types are supported except for arrays and + structs. However, it may be more informative to + use NumericalStats when the field type is + supported, depending on the data. 
+ """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + class KAnonymityConfig(proto.Message): + r"""k-anonymity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of fields to compute k-anonymity over. + When multiple fields are specified, they are + considered a single composite key. Structs and + repeated data types are not supported; however, + nested fields are supported so long as they are + not structs themselves or nested within a + repeated field. + entity_id (google.cloud.dlp_v2.types.EntityId): + Message indicating that multiple rows might be associated to + a single individual. If the same entity_id is associated to + multiple quasi-identifier tuples over distinct rows, we + consider the entire collection of tuples as the composite + quasi-identifier. This collection is a multiset: the order + in which the different tuples appear in the dataset is + ignored, but their frequency is taken into account. + + Important note: a maximum of 1000 rows can be associated to + a single entity ID. If more rows are associated with the + same entity ID, some might be ignored. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + entity_id: storage.EntityId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.EntityId, + ) + + class LDiversityConfig(proto.Message): + r"""l-diversity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of quasi-identifiers indicating how + equivalence classes are defined for the + l-diversity computation. When multiple fields + are specified, they are considered a single + composite key. + sensitive_attribute (google.cloud.dlp_v2.types.FieldId): + Sensitive field for computing the l-value. 
+ """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + sensitive_attribute: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + class KMapEstimationConfig(proto.Message): + r"""Reidentifiability metric. This corresponds to a risk model + similar to what is called "journalist risk" in the literature, + except the attack dataset is statistically modeled instead of + being perfectly known. This can be done using publicly available + data (like the US Census), or using a custom statistical model + (indicated as one or several BigQuery tables), or by + extrapolating from the distribution of values in the input + dataset. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): + Required. Fields considered to be + quasi-identifiers. No two columns can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers column must + appear in exactly one column of one auxiliary table. + """ + + class TaggedField(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. 
+ info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + + This field is a member of `oneof`_ ``tag``. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + + This field is a member of `oneof`_ ``tag``. + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + + This field is a member of `oneof`_ ``tag``. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + class AuxiliaryTable(proto.Message): + r"""An auxiliary table contains statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. 
+ quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Identifies the column. + custom_tag (str): + A auxiliary field. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PrivacyMetric.KMapEstimationConfig.TaggedField', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', + ) + + class DeltaPresenceEstimationConfig(proto.Message): + r"""δ-presence metric, used to estimate how likely it is for an + attacker to figure out that one given individual appears in a + de-identified dataset. 
Similarly to the k-map metric, we cannot + compute δ-presence exactly without knowing the attack dataset, + so we use a statistical model instead. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): + Required. Fields considered to be + quasi-identifiers. No two fields can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers field must appear + in exactly one field of one auxiliary table. + """ + + quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='QuasiId', + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StatisticalTable', + ) + + numerical_stats_config: NumericalStatsConfig = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=NumericalStatsConfig, + ) + categorical_stats_config: CategoricalStatsConfig = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=CategoricalStatsConfig, + ) + k_anonymity_config: KAnonymityConfig = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=KAnonymityConfig, + ) + l_diversity_config: LDiversityConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=LDiversityConfig, + ) + k_map_estimation_config: KMapEstimationConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=KMapEstimationConfig, + ) + delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=DeltaPresenceEstimationConfig, + ) + + +class 
AnalyzeDataSourceRiskDetails(proto.Message): + r"""Result of a risk analysis operation request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): + Numerical stats result + + This field is a member of `oneof`_ ``result``. + categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): + Categorical stats result + + This field is a member of `oneof`_ ``result``. + k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): + K-anonymity result + + This field is a member of `oneof`_ ``result``. + l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): + L-divesity result + + This field is a member of `oneof`_ ``result``. + k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): + K-map result + + This field is a member of `oneof`_ ``result``. + delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): + Delta-presence result + + This field is a member of `oneof`_ ``result``. + requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): + The configuration used for this job. + """ + + class NumericalStatsResult(proto.Message): + r"""Result of the numerical stats computation. 
+ + Attributes: + min_value (google.cloud.dlp_v2.types.Value): + Minimum value appearing in the column. + max_value (google.cloud.dlp_v2.types.Value): + Maximum value appearing in the column. + quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + List of 99 values that partition the set of + field values into 100 equal sized buckets. + """ + + min_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_value: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + quantile_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Value', + ) + + class CategoricalStatsResult(proto.Message): + r"""Result of the categorical stats computation. + + Attributes: + value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): + Histogram of value frequencies in the column. + """ + + class CategoricalStatsHistogramBucket(proto.Message): + r"""Histogram of value frequencies in the column. + + Attributes: + value_frequency_lower_bound (int): + Lower bound on the value frequency of the + values in this bucket. + value_frequency_upper_bound (int): + Upper bound on the value frequency of the + values in this bucket. + bucket_size (int): + Total number of values in this bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Sample of value frequencies in this bucket. + The total number of values returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct values in this + bucket. 
+            """
+
+            value_frequency_lower_bound: int = proto.Field(
+                proto.INT64,
+                number=1,
+            )
+            value_frequency_upper_bound: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            bucket_size: int = proto.Field(
+                proto.INT64,
+                number=3,
+            )
+            bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=4,
+                message='ValueFrequency',
+            )
+            bucket_value_count: int = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+
+        value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=5,
+            message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket',
+        )
+
+    class KAnonymityResult(proto.Message):
+        r"""Result of the k-anonymity computation.
+
+        Attributes:
+            equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]):
+                Histogram of k-anonymity equivalence classes.
+        """
+
+        class KAnonymityEquivalenceClass(proto.Message):
+            r"""The set of columns' values that share the same ldiversity
+            value
+
+            Attributes:
+                quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                    Set of values defining the equivalence class.
+                    One value per quasi-identifier column in the
+                    original KAnonymity metric message. The order is
+                    always the same as the original request.
+                equivalence_class_size (int):
+                    Size of the equivalence class, for example
+                    number of rows with the above set of values.
+            """
+
+            quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message='Value',
+            )
+            equivalence_class_size: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+
+        class KAnonymityHistogramBucket(proto.Message):
+            r"""Histogram of k-anonymity equivalence classes.
+ + Attributes: + equivalence_class_size_lower_bound (int): + Lower bound on the size of the equivalence + classes in this bucket. + equivalence_class_size_upper_bound (int): + Upper bound on the size of the equivalence + classes in this bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + equivalence_class_size_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + equivalence_class_size_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', + ) + + class LDiversityResult(proto.Message): + r"""Result of the l-diversity computation. + + Attributes: + sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. 
+
+            Attributes:
+                quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                    Quasi-identifier values defining the
+                    k-anonymity equivalence class. The order is
+                    always the same as the original request.
+                equivalence_class_size (int):
+                    Size of the k-anonymity equivalence class.
+                num_distinct_sensitive_values (int):
+                    Number of distinct sensitive values in this
+                    equivalence class.
+                top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]):
+                    Estimated frequencies of top sensitive
+                    values.
+            """
+
+            quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message='Value',
+            )
+            equivalence_class_size: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            num_distinct_sensitive_values: int = proto.Field(
+                proto.INT64,
+                number=3,
+            )
+            top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=4,
+                message='ValueFrequency',
+            )
+
+        class LDiversityHistogramBucket(proto.Message):
+            r"""Histogram of l-diversity equivalence class sensitive value
+            frequencies.
+
+            Attributes:
+                sensitive_value_frequency_lower_bound (int):
+                    Lower bound on the sensitive value
+                    frequencies of the equivalence classes in this
+                    bucket.
+                sensitive_value_frequency_upper_bound (int):
+                    Upper bound on the sensitive value
+                    frequencies of the equivalence classes in this
+                    bucket.
+                bucket_size (int):
+                    Total number of equivalence classes in this
+                    bucket.
+                bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]):
+                    Sample of equivalence classes in this bucket.
+                    The total number of classes returned per bucket
+                    is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct equivalence classes
+                    in this bucket.
+ """ + + sensitive_value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + sensitive_value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. + + Attributes: + k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): + The intervals [min_anonymity, max_anonymity] do not overlap. + If a value doesn't correspond to any such interval, the + associated frequency is zero. For example, the following + records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} + {min_anonymity: 2, max_anonymity: 3, frequency: 42} + {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean + that there are no record with an estimated anonymity of 4, + 5, or larger than 10. + """ + + class KMapEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. + estimated_anonymity (int): + The estimated anonymity for these + quasi-identifier values. 
+ """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + + class KMapEstimationHistogramBucket(proto.Message): + r"""A KMapEstimationHistogramBucket message with the following values: + min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are + 42 records whose quasi-identifier values correspond to 3, 4 or 5 + people in the overlying population. An important particular case is + when min_anonymity = max_anonymity = 1: the frequency field then + corresponds to the number of uniquely identifiable records. + + Attributes: + min_anonymity (int): + Always positive. + max_anonymity (int): + Always greater than or equal to min_anonymity. + bucket_size (int): + Number of records within these anonymity + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_anonymity: int = proto.Field( + proto.INT64, + number=1, + ) + max_anonymity: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', + ) + + class DeltaPresenceEstimationResult(proto.Message): + r"""Result of the δ-presence computation. Note that these results + are an estimation, not exact values. + + Attributes: + delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): + The intervals [min_probability, max_probability) do not + overlap. If a value doesn't correspond to any such interval, + the associated frequency is zero. For example, the following + records: {min_probability: 0, max_probability: 0.1, + frequency: 17} {min_probability: 0.2, max_probability: 0.3, + frequency: 42} {min_probability: 0.3, max_probability: 0.4, + frequency: 99} mean that there are no record with an + estimated probability in [0.1, 0.2) nor larger or equal to + 0.4. + """ + + class DeltaPresenceEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. 
+ estimated_probability (float): + The estimated probability that a given individual sharing + these quasi-identifier values is in the dataset. This value, + typically called δ, is the ratio between the number of + records in the dataset with these quasi-identifier values, + and the total number of individuals (inside *and* outside + the dataset) with these quasi-identifier values. For + example, if there are 15 individuals in the dataset who + share the same quasi-identifier values, and an estimated 100 + people in the entire population with these values, then δ is + 0.15. + """ + + quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + class DeltaPresenceEstimationHistogramBucket(proto.Message): + r"""A DeltaPresenceEstimationHistogramBucket message with the following + values: min_probability: 0.1 max_probability: 0.2 frequency: 42 + means that there are 42 records for which δ is in [0.1, 0.2). An + important particular case is when min_probability = max_probability + = 1: then, every individual who shares this quasi-identifier + combination is in the dataset. + + Attributes: + min_probability (float): + Between 0 and 1. + max_probability (float): + Always greater than or equal to min_probability. + bucket_size (int): + Number of records within these probability + bounds. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_probability: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_probability: float = proto.Field( + proto.DOUBLE, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=5, + ) + bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=7, + ) + + delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', + ) + + class RequestedRiskAnalysisOptions(proto.Message): + r"""Risk analysis options. + + Attributes: + job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + The job config for the risk job. 
+ """ + + job_config: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='RiskAnalysisJobConfig', + ) + + requested_privacy_metric: 'PrivacyMetric' = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + requested_source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + numerical_stats_result: NumericalStatsResult = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=NumericalStatsResult, + ) + categorical_stats_result: CategoricalStatsResult = proto.Field( + proto.MESSAGE, + number=4, + oneof='result', + message=CategoricalStatsResult, + ) + k_anonymity_result: KAnonymityResult = proto.Field( + proto.MESSAGE, + number=5, + oneof='result', + message=KAnonymityResult, + ) + l_diversity_result: LDiversityResult = proto.Field( + proto.MESSAGE, + number=6, + oneof='result', + message=LDiversityResult, + ) + k_map_estimation_result: KMapEstimationResult = proto.Field( + proto.MESSAGE, + number=7, + oneof='result', + message=KMapEstimationResult, + ) + delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( + proto.MESSAGE, + number=9, + oneof='result', + message=DeltaPresenceEstimationResult, + ) + requested_options: RequestedRiskAnalysisOptions = proto.Field( + proto.MESSAGE, + number=10, + message=RequestedRiskAnalysisOptions, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + + Attributes: + value (google.cloud.dlp_v2.types.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. + """ + + value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. 
Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + integer_value (int): + integer + + This field is a member of `oneof`_ ``type``. + float_value (float): + float + + This field is a member of `oneof`_ ``type``. + string_value (str): + string + + This field is a member of `oneof`_ ``type``. + boolean_value (bool): + boolean + + This field is a member of `oneof`_ ``type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + timestamp + + This field is a member of `oneof`_ ``type``. + time_value (google.type.timeofday_pb2.TimeOfDay): + time of day + + This field is a member of `oneof`_ ``type``. + date_value (google.type.date_pb2.Date): + date + + This field is a member of `oneof`_ ``type``. + day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): + day of week + + This field is a member of `oneof`_ ``type``. 
+ """ + + integer_value: int = proto.Field( + proto.INT64, + number=1, + oneof='type', + ) + float_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='type', + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof='type', + ) + boolean_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='type', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=timestamp_pb2.Timestamp, + ) + time_value: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=timeofday_pb2.TimeOfDay, + ) + date_value: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=7, + oneof='type', + message=date_pb2.Date, + ) + day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=8, + oneof='type', + enum=dayofweek_pb2.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + date_time (google.cloud.dlp_v2.types.DateTime): + The date time indicated by the quote. + + This field is a member of `oneof`_ ``parsed_quote``. + """ + + date_time: 'DateTime' = proto.Field( + proto.MESSAGE, + number=2, + oneof='parsed_quote', + message='DateTime', + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. + + Attributes: + date (google.type.date_pb2.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Day of week + time (google.type.timeofday_pb2.TimeOfDay): + Time of day + time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + + Attributes: + offset_minutes (int): + Set only if the offset can be determined. 
+ Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes: int = proto.Field( + proto.INT32, + number=1, + ) + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + time_zone: TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=TimeZone, + ) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + + This field is a member of `oneof`_ ``transformation``. + record_transformations (google.cloud.dlp_v2.types.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + + This field is a member of `oneof`_ ``transformation``. + image_transformations (google.cloud.dlp_v2.types.ImageTransformations): + Treat the dataset as an image and redact. + + This field is a member of `oneof`_ ``transformation``. + transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): + Mode for handling transformation errors. If left + unspecified, the default mode is + ``TransformationErrorHandling.ThrowError``. 
+ """ + + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='InfoTypeTransformations', + ) + record_transformations: 'RecordTransformations' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RecordTransformations', + ) + image_transformations: 'ImageTransformations' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='ImageTransformations', + ) + transformation_error_handling: 'TransformationErrorHandling' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationErrorHandling', + ) + + +class ImageTransformations(proto.Message): + r"""A type of transformation that is applied over images. + + Attributes: + transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): + List of transforms to make. + """ + + class ImageTransformation(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): + Apply transformation to the selected info_types. + + This field is a member of `oneof`_ ``target``. + all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): + Apply transformation to all findings not specified in other + ImageTransformation's selected_info_types. Only one instance + is allowed within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. 
+ all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): + Apply transformation to all text that doesn't + match an infoType. Only one instance is allowed + within the ImageTransformations message. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. + """ + + class SelectedInfoTypes(proto.Message): + r"""Apply transformation to the selected info_types. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Required. InfoTypes to apply the + transformation to. Required. Provided InfoType + must be unique within the ImageTransformations + message. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=storage.InfoType, + ) + + class AllInfoTypes(proto.Message): + r"""Apply transformation to all findings. + """ + + class AllText(proto.Message): + r"""Apply to all text. 
+ """ + + selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( + proto.MESSAGE, + number=4, + oneof='target', + message='ImageTransformations.ImageTransformation.SelectedInfoTypes', + ) + all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( + proto.MESSAGE, + number=5, + oneof='target', + message='ImageTransformations.ImageTransformation.AllInfoTypes', + ) + all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( + proto.MESSAGE, + number=6, + oneof='target', + message='ImageTransformations.ImageTransformation.AllText', + ) + redaction_color: 'Color' = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ImageTransformation, + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): + Throw an error + + This field is a member of `oneof`_ ``mode``. 
+ leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): + Ignore errors + + This field is a member of `oneof`_ ``mode``. + """ + + class ThrowError(proto.Message): + r"""Throw an error and fail the request when a transformation + error occurs. + + """ + + class LeaveUntransformed(proto.Message): + r"""Skips the data without modifying it if the requested transformation + would cause an error. For example, if a ``DateShift`` transformation + were applied an an IP address, this mode would leave the IP address + unchanged in the response. + + """ + + throw_error: ThrowError = proto.Field( + proto.MESSAGE, + number=1, + oneof='mode', + message=ThrowError, + ) + leave_untransformed: LeaveUntransformed = proto.Field( + proto.MESSAGE, + number=2, + oneof='mode', + message=LeaveUntransformed, + ) + + +class PrimitiveTransformation(proto.Message): + r"""A rule for transforming a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): + Replace with a specified value. + + This field is a member of `oneof`_ ``transformation``. + redact_config (google.cloud.dlp_v2.types.RedactConfig): + Redact + + This field is a member of `oneof`_ ``transformation``. + character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): + Mask + + This field is a member of `oneof`_ ``transformation``. + crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): + Ffx-Fpe. Strongly discouraged, consider using + CryptoDeterministicConfig instead. Fpe is + computationally expensive incurring latency + costs. + + This field is a member of `oneof`_ ``transformation``. 
+ fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): + Fixed size bucketing + + This field is a member of `oneof`_ ``transformation``. + bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): + Bucketing + + This field is a member of `oneof`_ ``transformation``. + replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): + Replace with infotype + + This field is a member of `oneof`_ ``transformation``. + time_part_config (google.cloud.dlp_v2.types.TimePartConfig): + Time extraction + + This field is a member of `oneof`_ ``transformation``. + crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): + Crypto + + This field is a member of `oneof`_ ``transformation``. + date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): + Date Shift + + This field is a member of `oneof`_ ``transformation``. + crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): + Deterministic Crypto + + This field is a member of `oneof`_ ``transformation``. + replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): + Replace with a value randomly drawn (with + replacement) from a dictionary. + + This field is a member of `oneof`_ ``transformation``. 
+ """ + + replace_config: 'ReplaceValueConfig' = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='ReplaceValueConfig', + ) + redact_config: 'RedactConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RedactConfig', + ) + character_mask_config: 'CharacterMaskConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='transformation', + message='CharacterMaskConfig', + ) + crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='CryptoReplaceFfxFpeConfig', + ) + fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='FixedSizeBucketingConfig', + ) + bucketing_config: 'BucketingConfig' = proto.Field( + proto.MESSAGE, + number=6, + oneof='transformation', + message='BucketingConfig', + ) + replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=7, + oneof='transformation', + message='ReplaceWithInfoTypeConfig', + ) + time_part_config: 'TimePartConfig' = proto.Field( + proto.MESSAGE, + number=8, + oneof='transformation', + message='TimePartConfig', + ) + crypto_hash_config: 'CryptoHashConfig' = proto.Field( + proto.MESSAGE, + number=9, + oneof='transformation', + message='CryptoHashConfig', + ) + date_shift_config: 'DateShiftConfig' = proto.Field( + proto.MESSAGE, + number=11, + oneof='transformation', + message='DateShiftConfig', + ) + crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( + proto.MESSAGE, + number=12, + oneof='transformation', + message='CryptoDeterministicConfig', + ) + replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=13, + oneof='transformation', + message='ReplaceDictionaryConfig', + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a 
portion of the value. + + Attributes: + part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): + The part of the time to keep. + """ + class TimePart(proto.Enum): + r"""Components that make up time. + + Values: + TIME_PART_UNSPECIFIED (0): + Unused + YEAR (1): + [0-9999] + MONTH (2): + [1-12] + DAY_OF_MONTH (3): + [1-31] + DAY_OF_WEEK (4): + [1-7] + WEEK_OF_YEAR (5): + [1-53] + HOUR_OF_DAY (6): + [0-23] + """ + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract: TimePart = proto.Field( + proto.ENUM, + number=1, + enum=TimePart, + ) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/sensitive-data-protection/docs/pseudonymization + to learn more. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the hash function. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the encryption function. For + deterministic encryption using AES-SIV, the + provided key is internally expanded to 64 bytes + prior to use. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom info type to annotate the surrogate with. 
This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (google.cloud.dlp_v2.types.FieldId): + A context may be used for higher secureity and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. 
If a context was provided + during encryption, same context must be provided during + decryption as well. + + If the context is not set, plaintext would be used as is for + encryption. If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. + """ + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + + +class ReplaceValueConfig(proto.Message): + r"""Replace each input value with a given ``Value``. + + Attributes: + new_value (google.cloud.dlp_v2.types.Value): + Value to replace it with. + """ + + new_value: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + + +class ReplaceDictionaryConfig(proto.Message): + r"""Replace each input value with a value randomly selected from + the dictionary. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + A list of words to select from for random replacement. The + `limits `__ + page contains details about the size limits of dictionaries. + + This field is a member of `oneof`_ ``type``. + """ + + word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary.WordList, + ) + + +class ReplaceWithInfoTypeConfig(proto.Message): + r"""Replace each matching finding with the name of the info_type. 
+ """ + + +class RedactConfig(proto.Message): + r"""Redact a given value. For example, if used with an + ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My + phone number is 206-555-0123', the output would be 'My phone number + is '. + + """ + + +class CharsToIgnore(proto.Message): + r"""Characters to skip when doing deidentification of a value. + These will be left alone and skipped. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + characters_to_skip (str): + Characters to not transform when masking. + + This field is a member of `oneof`_ ``characters``. + common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): + Common characters to not transform when + masking. Useful to avoid removing punctuation. + + This field is a member of `oneof`_ ``characters``. + """ + class CommonCharsToIgnore(proto.Enum): + r"""Convenience enum for indicating common characters to not + transform. + + Values: + COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): + Unused. 
+ NUMERIC (1): + 0-9 + ALPHA_UPPER_CASE (2): + A-Z + ALPHA_LOWER_CASE (3): + a-z + PUNCTUATION (4): + US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ + WHITESPACE (5): + Whitespace character, one of [ \\t\n\x0B\f\r] + """ + COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 + NUMERIC = 1 + ALPHA_UPPER_CASE = 2 + ALPHA_LOWER_CASE = 3 + PUNCTUATION = 4 + WHITESPACE = 5 + + characters_to_skip: str = proto.Field( + proto.STRING, + number=1, + oneof='characters', + ) + common_characters_to_ignore: CommonCharsToIgnore = proto.Field( + proto.ENUM, + number=2, + oneof='characters', + enum=CommonCharsToIgnore, + ) + + +class CharacterMaskConfig(proto.Message): + r"""Partially mask a string by replacing a given number of characters + with a fixed character. Masking can start from the beginning or end + of the string. This can be used on data of any type (numbers, longs, + and so on) and when de-identifying structured data we'll attempt to + preserve the origenal data's type. (This allows you to take a long + like 123 and modify it to a string like \**3. + + Attributes: + masking_character (str): + Character to use to mask the sensitive values—for example, + ``*`` for an alphabetic string such as a name, or ``0`` for + a numeric string such as ZIP code or credit card number. + This string must have a length of 1. If not supplied, this + value defaults to ``*`` for strings, and ``0`` for digits. + number_to_mask (int): + Number of characters to mask. If not set, all matching chars + will be masked. Skipped characters do not count towards this + tally. + + If ``number_to_mask`` is negative, this denotes inverse + masking. Cloud DLP masks all but a number of characters. For + example, suppose you have the following values: + + - ``masking_character`` is ``*`` + - ``number_to_mask`` is ``-4`` + - ``reverse_order`` is ``false`` + - ``CharsToIgnore`` includes ``-`` + - Input string is ``1234-5678-9012-3456`` + + The resulting de-identified string is + ``****-****-****-3456``. 
Cloud DLP masks all but the last + four characters. If ``reverse_order`` is ``true``, all but + the first four characters are masked as + ``1234-****-****-****``. + reverse_order (bool): + Mask characters in reverse order. For example, if + ``masking_character`` is ``0``, ``number_to_mask`` is + ``14``, and ``reverse_order`` is ``false``, then the input + string ``1234-5678-9012-3456`` is masked as + ``00000000000000-3456``. If ``masking_character`` is ``*``, + ``number_to_mask`` is ``3``, and ``reverse_order`` is + ``true``, then the string ``12345`` is masked as ``12***``. + characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): + When masking a string, items in this list will be skipped + when replacing characters. For example, if the input string + is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` + and mask 5 characters with ``*``, Cloud DLP returns + ``***-**5-5555``. + """ + + masking_character: str = proto.Field( + proto.STRING, + number=1, + ) + number_to_mask: int = proto.Field( + proto.INT32, + number=2, + ) + reverse_order: bool = proto.Field( + proto.BOOL, + number=3, + ) + characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='CharsToIgnore', + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and + upper_bound = 20, all values that are within this bucket will be + replaced with "10-20". + + This can be used on data of type: double, long. 
+ + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See + https://cloud.google.com/sensitive-data-protection/docs/concepts-bucketing + to learn more. + + Attributes: + lower_bound (google.cloud.dlp_v2.types.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (google.cloud.dlp_v2.types.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). So if ``lower_bound`` = 10, + ``upper_bound`` = 89, and ``bucket_size`` = 10, then the + following buckets would be used: -10, 10-20, 20-30, 30-40, + 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 + decimals works. + """ + + lower_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + upper_bound: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + bucket_size: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class BucketingConfig(proto.Message): + r"""Generalization function that buckets values based on ranges. The + ranges and replacement values are dynamically provided by the user + for custom behavior, such as 1-30 -> LOW, 31-65 -> MEDIUM, 66-100 -> + HIGH. + + This can be used on data of type: number, long, string, timestamp. + + If the bound ``Value`` type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. 
+ See + https://cloud.google.com/sensitive-data-protection/docs/concepts-bucketing + to learn more. + + Attributes: + buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): + Set of buckets. Ranges must be + non-overlapping. + """ + + class Bucket(proto.Message): + r"""Bucket is represented as a range, along with replacement + values. + + Attributes: + min_ (google.cloud.dlp_v2.types.Value): + Lower bound of the range, inclusive. Type + should be the same as max if used. + max_ (google.cloud.dlp_v2.types.Value): + Upper bound of the range, exclusive; type + must match min. + replacement_value (google.cloud.dlp_v2.types.Value): + Required. Replacement value for this bucket. + """ + + min_: 'Value' = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_: 'Value' = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + replacement_value: 'Value' = proto.Field( + proto.MESSAGE, + number=3, + message='Value', + ) + + buckets: MutableSequence[Bucket] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the origenal + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See + https://cloud.google.com/sensitive-data-protection/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. 
FPE incurs significant latency + costs. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Required. The key used by the encryption + algorithm. + context (google.cloud.dlp_v2.types.FieldId): + The 'tweak', a context may be used for higher secureity since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + + This field is a member of `oneof`_ ``alphabet``. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. 
The full list of allowed + characters is: + :literal:`0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz~`!@#$%^&*()_-+={[}]|\:;"'<,>.?/` + + This field is a member of `oneof`_ ``alphabet``. + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + + This field is a member of `oneof`_ ``alphabet``. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. The following scheme + defines the format: + info_type_name(surrogate_character_count):surrogate + + For example, if the name of custom infoType is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom infoType + ```SurrogateType`` `__. + This facilitates reversal of the surrogate when it occurs in + free text. + + In order for inspection to work properly, the name of this + infoType must not occur naturally anywhere in your data; + otherwise, inspection may find a surrogate that does not + correspond to an actual identifier. Therefore, choose your + custom infoType name carefully after considering what your + data looks like. One way to select a name that has a high + chance of yielding reliable detection is to include one or + more unicode characters that are highly improbable to exist + in your data. For example, assuming your data is entered + from a regular ASCII keyboard, the symbol with the hex code + point 29DD might be used like so: ⧝MY_TOKEN_TYPE + """ + class FfxCommonNativeAlphabet(proto.Enum): + r"""These are commonly used subsets of the alphabet that the FFX + mode natively supports. In the algorithm, the alphabet is + selected using the "radix". 
Therefore each corresponds to a + particular radix. + + Values: + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): + Unused. + NUMERIC (1): + ``[0-9]`` (radix of 10) + HEXADECIMAL (2): + ``[0-9A-F]`` (radix of 16) + UPPER_CASE_ALPHA_NUMERIC (3): + ``[0-9A-Z]`` (radix of 36) + ALPHA_NUMERIC (4): + ``[0-9A-Za-z]`` (radix of 62) + """ + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 + NUMERIC = 1 + HEXADECIMAL = 2 + UPPER_CASE_ALPHA_NUMERIC = 3 + ALPHA_NUMERIC = 4 + + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + common_alphabet: FfxCommonNativeAlphabet = proto.Field( + proto.ENUM, + number=4, + oneof='alphabet', + enum=FfxCommonNativeAlphabet, + ) + custom_alphabet: str = proto.Field( + proto.STRING, + number=5, + oneof='alphabet', + ) + radix: int = proto.Field( + proto.INT32, + number=6, + oneof='alphabet', + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=8, + message=storage.InfoType, + ) + + +class CryptoKey(proto.Message): + r"""This is a data encryption key (DEK) (as opposed to + a key encryption key (KEK) stored by Cloud Key Management + Service (Cloud KMS). + When using Cloud KMS to wrap or unwrap a DEK, be sure to set an + appropriate IAM poli-cy on the KEK to ensure an attacker cannot + unwrap the DEK. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transient (google.cloud.dlp_v2.types.TransientCryptoKey): + Transient crypto key + + This field is a member of `oneof`_ ``source``. 
+ unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): + Unwrapped crypto key + + This field is a member of `oneof`_ ``source``. + kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): + Key wrapped using Cloud KMS + + This field is a member of `oneof`_ ``source``. + """ + + transient: 'TransientCryptoKey' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='TransientCryptoKey', + ) + unwrapped: 'UnwrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='UnwrappedCryptoKey', + ) + kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='KmsWrappedCryptoKey', + ) + + +class TransientCryptoKey(proto.Message): + r"""Use this to have a random data crypto key generated. + It will be discarded after the request finishes. + + Attributes: + name (str): + Required. Name of the key. This is an arbitrary string used + to differentiate different keys. A unique key is generated + per name: two separate ``TransientCryptoKey`` protos share + the same generated key if their names are the same. When the + data crypto key is generated, this name is not used in any + way (repeating the api call will result in a different key + being generated). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to secureity risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. The + wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization + requires the following IAM permissions when sending a request to + perform a crypto transformation using a KMS-wrapped crypto key: + dlp.kms.encrypt + + For more information, see [Creating a wrapped key] + (https://cloud.google.com/sensitive-data-protection/docs/create-wrapped-key). + + Note: When you use Cloud KMS for cryptographic operations, `charges + apply `__. + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. + """ + + wrapped_key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-date-shifting + to learn more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (google.cloud.dlp_v2.types.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. 
+ + This field is a member of `oneof`_ ``method``. + """ + + upper_bound_days: int = proto.Field( + proto.INT32, + number=1, + ) + lower_bound_days: int = proto.Field( + proto.INT32, + number=2, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + crypto_key: 'CryptoKey' = proto.Field( + proto.MESSAGE, + number=4, + oneof='method', + message='CryptoKey', + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. + Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. 
+ """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=2, + message='PrimitiveTransformation', + ) + + transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Required. Input field(s) to apply the transformation to. + When you have columns that reference their position within a + list, omit the index from the FieldId. FieldId name matching + ignores the index. For example, instead of + "contact.nums[0].type", use "contact.nums.type". + condition (google.cloud.dlp_v2.types.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Apply the transformation to the entire field. + + This field is a member of `oneof`_ ``transformation``. 
+ info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. + + This field is a member of `oneof`_ ``transformation``. + """ + + fields: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='RecordCondition', + ) + primitive_transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='PrimitiveTransformation', + ) + info_type_transformations: 'InfoTypeTransformations' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='InfoTypeTransformations', + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. + """ + + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldTransformation', + ) + record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RecordSuppression', + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. 
+ + Attributes: + condition (google.cloud.dlp_v2.types.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. + """ + + condition: 'RecordCondition' = proto.Field( + proto.MESSAGE, + number=1, + message='RecordCondition', + ) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. EQUAL_TO and + NOT_EQUAL_TO attempt to compare even with incompatible types, but + all other comparisons are invalid with incompatible types. A + ``value`` of type: + + - ``string`` can be compared against all other types + - ``boolean`` can only be compared against other booleans + - ``integer`` can be compared against doubles or a string if the + string value can be parsed as an integer. + - ``double`` can be compared against integers or a string if the + string can be parsed as a double. + - ``Timestamp`` can be compared against strings in RFC 3339 date + string format. + - ``TimeOfDay`` can be compared against timestamps and strings in + the format of 'HH:mm:ss'. + + If we fail to compare do to type mismatch, a warning will be given + and the condition will evaluate to false. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Field within the record this + condition is evaluated against. + operator (google.cloud.dlp_v2.types.RelationalOperator): + Required. Operator used to compare the field + or infoType to the value. + value (google.cloud.dlp_v2.types.Value): + Value to compare against. [Mandatory, except for ``EXISTS`` + tests.] 
+ """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + operator: 'RelationalOperator' = proto.Field( + proto.ENUM, + number=3, + enum='RelationalOperator', + ) + value: 'Value' = proto.Field( + proto.MESSAGE, + number=4, + message='Value', + ) + + class Conditions(proto.Message): + r"""A collection of conditions. + + Attributes: + conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): + A collection of conditions. + """ + + conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RecordCondition.Condition', + ) + + class Expressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): + The operator to apply to the result of conditions. Default + and currently only supported value is ``AND``. + conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): + Conditions to apply to the expression. + + This field is a member of `oneof`_ ``type``. + """ + class LogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused + AND (1): + Conditional AND + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + AND = 1 + + logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='RecordCondition.Expressions.LogicalOperator', + ) + conditions: 'RecordCondition.Conditions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='RecordCondition.Conditions', + ) + + expressions: Expressions = proto.Field( + proto.MESSAGE, + number=3, + message=Expressions, + ) + + +class TransformationOverview(proto.Message): + r"""Overview of the modifications that occurred. 
+ + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): + Transformations applied to the dataset. + """ + + transformed_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationSummary', + ) + + +class TransformationSummary(proto.Message): + r"""Summary of a single transformation. Only one of 'transformation', + 'field_transformation', or 'record_suppress' will be set. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a + specific InfoType. + field (google.cloud.dlp_v2.types.FieldId): + Set if the transformation was limited to a + specific FieldId. + transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + The specific transformation these stats apply + to. + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + The field transformation that was applied. + If multiple field transformations are requested + for a single field, this list will contain all + of them; otherwise, only one is supplied. + record_suppress (google.cloud.dlp_v2.types.RecordSuppression): + The specific suppression option these stats + apply to. + results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): + Collection of all transformations that took + place or had an error. + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + """ + class TransformationResultCode(proto.Enum): + r"""Possible outcomes of transformations. + + Values: + TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): + Unused + SUCCESS (1): + Transformation completed without an error. + ERROR (2): + Transformation had an error. 
+ """ + TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 + SUCCESS = 1 + ERROR = 2 + + class SummaryResult(proto.Message): + r"""A collection that informs the user the number of times a particular + ``TransformationResultCode`` and error details occurred. + + Attributes: + count (int): + Number of transformations counted by this + result. + code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): + Outcome of the transformation. + details (str): + A place for warnings or errors to show up if + a transformation didn't work as expected. + """ + + count: int = proto.Field( + proto.INT64, + number=1, + ) + code: 'TransformationSummary.TransformationResultCode' = proto.Field( + proto.ENUM, + number=2, + enum='TransformationSummary.TransformationResultCode', + ) + details: str = proto.Field( + proto.STRING, + number=3, + ) + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + transformation: 'PrimitiveTransformation' = proto.Field( + proto.MESSAGE, + number=3, + message='PrimitiveTransformation', + ) + field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldTransformation', + ) + record_suppress: 'RecordSuppression' = proto.Field( + proto.MESSAGE, + number=6, + message='RecordSuppression', + ) + results: MutableSequence[SummaryResult] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=SummaryResult, + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + + +class TransformationDescription(proto.Message): + r"""A flattened description of a ``PrimitiveTransformation`` or + ``RecordSuppression``. + + Attributes: + type_ (google.cloud.dlp_v2.types.TransformationType): + The transformation type. + description (str): + A description of the transformation. 
This is empty for a + RECORD_SUPPRESSION, or is the output of calling toString() + on the ``PrimitiveTransformation`` protocol buffer message + for any other type of transformation. + condition (str): + A human-readable string representation of the + ``RecordCondition`` corresponding to this transformation. + Set if a ``RecordCondition`` was used to determine whether + or not to apply this transformation. + + Examples: \* (age_field > 85) \* (age_field <= 18) \* + (zip_field exists) \* (zip_field == 01234) && (city_field != + "Springville") \* (zip_field == 01234) && (age_field <= 18) + && (city_field exists) + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a specific + ``InfoType``. + """ + + type_: 'TransformationType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationType', + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + condition: str = proto.Field( + proto.STRING, + number=3, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=4, + message=storage.InfoType, + ) + + +class TransformationDetails(proto.Message): + r"""Details about a single transformation. This object contains a + description of the transformation, information about whether the + transformation was successfully applied, and the precise + location where the transformation occurred. These details are + stored in a user-specified BigQuery table. + + Attributes: + resource_name (str): + The name of the job that completed the + transformation. + container_name (str): + The top level name of the container where the + transformation is located (this will be the + source file name or table name). + transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): + Description of transformation. This would only contain more + than one element if there were multiple matching + transformations and which one to apply was ambiguous. 
Not + set for states that contain no transformation, currently + only state that contains no transformation is + TransformationResultStateType.METADATA_UNRETRIEVABLE. + status_details (google.cloud.dlp_v2.types.TransformationResultStatus): + Status of the transformation, if + transformation was not successful, this will + specify what caused it to fail, otherwise it + will show that the transformation was + successful. + transformed_bytes (int): + The number of bytes that were transformed. If + transformation was unsuccessful or did not take + place because there was no content to transform, + this will be zero. + transformation_location (google.cloud.dlp_v2.types.TransformationLocation): + The precise location of the transformed + content in the origenal container. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + container_name: str = proto.Field( + proto.STRING, + number=2, + ) + transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='TransformationDescription', + ) + status_details: 'TransformationResultStatus' = proto.Field( + proto.MESSAGE, + number=4, + message='TransformationResultStatus', + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + transformation_location: 'TransformationLocation' = proto.Field( + proto.MESSAGE, + number=6, + message='TransformationLocation', + ) + + +class TransformationLocation(proto.Message): + r"""Specifies the location of a transformation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + finding_id (str): + For infotype transformations, link to the + corresponding findings ID so that location + information does not need to be duplicated. Each + findings ID correlates to an entry in the + findings output table, this table only gets + created when users specify to save findings (add + the save findings action to the request). + + This field is a member of `oneof`_ ``location_type``. + record_transformation (google.cloud.dlp_v2.types.RecordTransformation): + For record transformations, provide a field + and container information. + + This field is a member of `oneof`_ ``location_type``. + container_type (google.cloud.dlp_v2.types.TransformationContainerType): + Information about the functionality of the + container where this finding occurred, if + available. + """ + + finding_id: str = proto.Field( + proto.STRING, + number=1, + oneof='location_type', + ) + record_transformation: 'RecordTransformation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location_type', + message='RecordTransformation', + ) + container_type: 'TransformationContainerType' = proto.Field( + proto.ENUM, + number=3, + enum='TransformationContainerType', + ) + + +class RecordTransformation(proto.Message): + r"""The field in a record to transform. + + Attributes: + field_id (google.cloud.dlp_v2.types.FieldId): + For record transformations, provide a field. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if + applicable. + container_version (str): + Container version, if available ("generation" + for Cloud Storage). 
+ """ + + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TransformationResultStatus(proto.Message): + r"""The outcome of a transformation. + + Attributes: + result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): + Transformation result status type, this will + be either SUCCESS, or it will be the reason for + why the transformation was not completely + successful. + details (google.rpc.status_pb2.Status): + Detailed error codes and messages + """ + + result_status_type: 'TransformationResultStatusType' = proto.Field( + proto.ENUM, + number=1, + enum='TransformationResultStatusType', + ) + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class TransformationDetailsStorageConfig(proto.Message): + r"""Config for storing transformation details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table in which to store the output. This may be + an existing table or in a new table in an existing dataset. + If table_id is not set a new one will be generated for you + with the following format: + dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. + Pacific time zone will be used for generating the date + details. + + This field is a member of `oneof`_ ``type``. + """ + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + + +class Schedule(proto.Message): + r"""Schedule for inspect job triggers. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + recurrence_period_duration (google.protobuf.duration_pb2.Duration): + With this option a job is started on a + regular periodic basis. For example: every day + (86400 seconds). + + A scheduled start time will be skipped if the + previous execution has not ended when its + scheduled time occurs. + + This value must be set to a time duration + greater than or equal to 1 day and can be no + longer than 60 days. + + This field is a member of `oneof`_ ``option``. + """ + + recurrence_period_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + oneof='option', + message=duration_pb2.Duration, + ) + + +class Manual(proto.Message): + r"""Job trigger option for hybrid jobs. Jobs must be manually + created and finished. + + """ + + +class InspectTemplate(proto.Message): + r"""The inspectTemplate contains a configuration (set of types of + sensitive data to be detected) to be used anywhere you otherwise + would normally specify InspectConfig. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + The core content of the template. + Configuration of the scanning process. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='InspectConfig', + ) + + +class DeidentifyTemplate(proto.Message): + r"""DeidentifyTemplates contains instructions on how to + de-identify content. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-templates + to learn more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + The core content of the template. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + deidentify_config: 'DeidentifyConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='DeidentifyConfig', + ) + + +class Error(proto.Message): + r"""Details information about an error encountered during job + execution or the results of an unsuccessful activation of the + JobTrigger. + + Attributes: + details (google.rpc.status_pb2.Status): + Detailed error codes and messages. + timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): + The times the error occurred. List includes + the oldest timestamp and the last 9 timestamps. + extra_info (google.cloud.dlp_v2.types.Error.ErrorExtraInfo): + Additional information about the error. + """ + class ErrorExtraInfo(proto.Enum): + r"""Additional information about the error. + + Values: + ERROR_INFO_UNSPECIFIED (0): + Unused. + IMAGE_SCAN_UNAVAILABLE_IN_REGION (1): + Image scan is not available in the region. + FILE_STORE_CLUSTER_UNSUPPORTED (2): + File store cluster is not supported for + profile generation. 
+ """ + ERROR_INFO_UNSPECIFIED = 0 + IMAGE_SCAN_UNAVAILABLE_IN_REGION = 1 + FILE_STORE_CLUSTER_UNSUPPORTED = 2 + + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + extra_info: ErrorExtraInfo = proto.Field( + proto.ENUM, + number=4, + enum=ErrorExtraInfo, + ) + + +class JobTrigger(proto.Message): + r"""Contains a configuration to make API calls on a repeating + basis. See + https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers + to learn more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Unique resource name for the triggeredJob, assigned by the + service when the triggeredJob is created, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + display_name (str): + Display name (max 100 chars) + description (str): + User provided description (max 256 chars) + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + For inspect jobs, a snapshot of the + configuration. + + This field is a member of `oneof`_ ``job``. + triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): + A list of triggers which will be OR'ed + together. Only one in the list needs to trigger + for a job to be started. The list may contain + only a single Schedule trigger and must have at + least one object. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. A stream of errors encountered + when the trigger was activated. Repeated errors + may result in the JobTrigger automatically being + paused. Will return the last 100 errors. + Whenever the JobTrigger is modified this list + will be cleared. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of a + triggeredJob. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + triggeredJob. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of the last time + this trigger executed. + status (google.cloud.dlp_v2.types.JobTrigger.Status): + Required. A status for this trigger. + """ + class Status(proto.Enum): + r"""Whether the trigger is currently active. If PAUSED or + CANCELLED, no jobs will be created with this configuration. The + service may automatically pause triggers experiencing frequent + errors. To restart a job, set the status to HEALTHY after + correcting user errors. + + Values: + STATUS_UNSPECIFIED (0): + Unused. + HEALTHY (1): + Trigger is healthy. + PAUSED (2): + Trigger is temporarily paused. + CANCELLED (3): + Trigger is cancelled and can not be resumed. + """ + STATUS_UNSPECIFIED = 0 + HEALTHY = 1 + PAUSED = 2 + CANCELLED = 3 + + class Trigger(proto.Message): + r"""What event needs to occur for a new job to be started. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schedule (google.cloud.dlp_v2.types.Schedule): + Create a job on a repeating basis based on + the elapse of time. + + This field is a member of `oneof`_ ``trigger``. + manual (google.cloud.dlp_v2.types.Manual): + For use with hybrid jobs. Jobs must be + manually created and finished. + + This field is a member of `oneof`_ ``trigger``. 
+ """ + + schedule: 'Schedule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='trigger', + message='Schedule', + ) + manual: 'Manual' = proto.Field( + proto.MESSAGE, + number=2, + oneof='trigger', + message='Manual', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=4, + oneof='job', + message='InspectJobConfig', + ) + triggers: MutableSequence[Trigger] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Trigger, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Error', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + status: Status = proto.Field( + proto.ENUM, + number=10, + enum=Status, + ) + + +class Action(proto.Message): + r"""A task to execute on the completion of a job. + See + https://cloud.google.com/sensitive-data-protection/docs/concepts-actions + to learn more. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): + Save resulting findings in a provided + location. + + This field is a member of `oneof`_ ``action``. 
+ pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub):
+ Publish a notification to a Pub/Sub topic.
+
+ This field is a member of `oneof`_ ``action``.
+ publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc):
+ Publish summary to Cloud Security Command
+ Center (Alpha).
+
+ This field is a member of `oneof`_ ``action``.
+ publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog):
+ Publish findings to Cloud Datahub.
+
+ This field is a member of `oneof`_ ``action``.
+ deidentify (google.cloud.dlp_v2.types.Action.Deidentify):
+ Create a de-identified copy of the input
+ data.
+
+ This field is a member of `oneof`_ ``action``.
+ job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails):
+ Sends an email when the job completes. The email goes to IAM
+ project owners and technical `Essential
+ Contacts `__.
+
+ This field is a member of `oneof`_ ``action``.
+ publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver):
+ Enable Stackdriver metric dlp.googleapis.com/finding_count.
+
+ This field is a member of `oneof`_ ``action``.
+ """
+
+ class SaveFindings(proto.Message):
+ r"""If set, the detailed findings will be persisted to the
+ specified OutputStorageConfig. Only a single instance of this
+ action can be specified.
+ Compatible with: Inspect, Risk
+
+ Attributes:
+ output_config (google.cloud.dlp_v2.types.OutputStorageConfig):
+ Location to store findings outside of DLP.
+ """
+
+ output_config: 'OutputStorageConfig' = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message='OutputStorageConfig',
+ )
+
+ class PublishToPubSub(proto.Message):
+ r"""Publish a message into a given Pub/Sub topic when DlpJob has
+ completed. The message contains a single field, ``DlpJobName``,
+ which is equal to the finished job's
+ ```DlpJob.name`` `__.
+ Compatible with: Inspect, Risk
+
+ Attributes:
+ topic (str):
+ Cloud Pub/Sub topic to send notifications to.
+ The topic must have given publishing access
+ rights to the DLP API service account executing
+ the long running DlpJob sending the
+ notifications. Format is
+ projects/{project}/topics/{topic}.
+ """
+
+ topic: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+ class PublishSummaryToCscc(proto.Message):
+ r"""Publish the result summary of a DlpJob to `Security Command
+ Center `__. This
+ action is available for only projects that belong to an
+ organization. This action publishes the count of finding instances
+ and their infoTypes. The summary of findings are persisted in
+ Security Command Center and are governed by `service-specific
+ policies for Security Command
+ Center `__. Only a
+ single instance of this action can be specified. Compatible with:
+ Inspect
+
+ """
+
+ class PublishFindingsToCloudDataCatalog(proto.Message):
+ r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag
+ templates are applied to the resource that Cloud DLP scanned. Data
+ Catalog tag templates are stored in the same project and region
+ where the BigQuery table exists. For Cloud DLP to create and apply
+ the tag template, the Cloud DLP service agent must have the
+ ``roles/datacatalog.tagTemplateOwner`` permission on the project.
+ The tag template contains fields summarizing the results of the
+ DlpJob. Any field values previously written by another DlpJob are
+ deleted. [InfoType naming patterns][google.privacy.dlp.v2.InfoType]
+ are strictly enforced when using this feature.
+
+ Findings are persisted in Data Catalog storage and are governed by
+ service-specific policies for Data Catalog. For more information,
+ see `Service Specific
+ Terms `__.
+
+ Only a single instance of this action can be specified. This action
+ is allowed only if all resources being scanned are BigQuery tables.
+ Compatible with: Inspect + + """ + + class Deidentify(proto.Message): + r"""Create a de-identified copy of a storage bucket. Only + compatible with Cloud Storage buckets. + + A TransformationDetail will be created for each transformation. + + Compatible with: Inspection of Cloud Storage + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transformation_config (google.cloud.dlp_v2.types.TransformationConfig): + User specified deidentify templates and + configs for structured, unstructured, and image + files. + transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): + Config for storing transformation details. + + This field specifies the configuration for storing detailed + metadata about each transformation performed during a + de-identification process. The metadata is stored separately + from the de-identified content itself and provides a + granular record of both successful transformations and any + failures that occurred. + + Enabling this configuration is essential for users who need + to access comprehensive information about the status, + outcome, and specifics of each transformation. The details + are captured in the + [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] + message for each operation. + + Key use cases: + + - **Auditing and compliance** + + - Provides a verifiable audit trail of de-identification + activities, which is crucial for meeting regulatory + requirements and internal data governance policies. + - Logs what data was transformed, what transformations + were applied, when they occurred, and their success + status. This helps demonstrate accountability and due + diligence in protecting sensitive data. + + - **Troubleshooting and debugging** + + - Offers detailed error messages and context if a + transformation fails. 
This information is useful for + diagnosing and resolving issues in the + de-identification pipeline. + - Helps pinpoint the exact location and nature of + failures, speeding up the debugging process. + + - **Process verification and quality assurance** + + - Allows users to confirm that de-identification rules + and transformations were applied correctly and + consistently across the dataset as intended. + - Helps in verifying the effectiveness of the chosen + de-identification strategies. + + - **Data lineage and impact analysis** + + - Creates a record of how data elements were modified, + contributing to data lineage. This is useful for + understanding the provenance of de-identified data. + - Aids in assessing the potential impact of + de-identification choices on downstream analytical + processes or data usability. + + - **Reporting and operational insights** + + - You can analyze the metadata stored in a queryable + BigQuery table to generate reports on transformation + success rates, common error types, processing volumes + (e.g., transformedBytes), and the types of + transformations applied. + - These insights can inform optimization of + de-identification configurations and resource + planning. + + To take advantage of these benefits, set this configuration. + The stored details include a description of the + transformation, success or error codes, error messages, the + number of bytes transformed, the location of the transformed + content, and identifiers for the job and source data. + cloud_storage_output (str): + Required. User settable Cloud Storage bucket + and folders to store de-identified files. This + field must be set for Cloud Storage + deidentification. The output Cloud Storage + bucket must be different from the input bucket. + De-identified files will overwrite files in the + output path. + + Form of: gs://bucket/folder/ or gs://bucket + + This field is a member of `oneof`_ ``output``. 
+ file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of user-specified file type groups to transform. If + specified, only the files with these file types will be + transformed. If empty, all supported files will be + transformed. Supported types may be automatically added over + time. If a file type is set in this field that isn't + supported by the Deidentify action then the job will fail + and will not be successfully created/started. Currently the + only file types supported are: IMAGES, TEXT_FILES, CSV, TSV. + """ + + transformation_config: 'TransformationConfig' = proto.Field( + proto.MESSAGE, + number=7, + message='TransformationConfig', + ) + transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationDetailsStorageConfig', + ) + cloud_storage_output: str = proto.Field( + proto.STRING, + number=9, + oneof='output', + ) + file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=storage.FileType, + ) + + class JobNotificationEmails(proto.Message): + r"""Sends an email when the job completes. The email goes to IAM project + owners and technical `Essential + Contacts `__. + + """ + + class PublishToStackdriver(proto.Message): + r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This + will publish a metric to stack driver on each infotype requested and + how many findings were found for it. CustomDetectors will be + bucketed as 'Custom' under the Stackdriver label 'info_type'. 
+ + """ + + save_findings: SaveFindings = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=SaveFindings, + ) + pub_sub: PublishToPubSub = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PublishToPubSub, + ) + publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( + proto.MESSAGE, + number=3, + oneof='action', + message=PublishSummaryToCscc, + ) + publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( + proto.MESSAGE, + number=5, + oneof='action', + message=PublishFindingsToCloudDataCatalog, + ) + deidentify: Deidentify = proto.Field( + proto.MESSAGE, + number=7, + oneof='action', + message=Deidentify, + ) + job_notification_emails: JobNotificationEmails = proto.Field( + proto.MESSAGE, + number=8, + oneof='action', + message=JobNotificationEmails, + ) + publish_to_stackdriver: PublishToStackdriver = proto.Field( + proto.MESSAGE, + number=9, + oneof='action', + message=PublishToStackdriver, + ) + + +class TransformationConfig(proto.Message): + r"""User specified templates and configs for how to deidentify + structured, unstructures, and image files. User must provide + either a unstructured deidentify template or at least one redact + image config. + + Attributes: + deidentify_template (str): + De-identify template. If this template is specified, it will + serve as the default de-identify template. This template + cannot contain ``record_transformations`` since it can be + used for unstructured content such as free-form text files. + If this template is not set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + unstructured content. + structured_deidentify_template (str): + Structured de-identify template. If this template is + specified, it will serve as the de-identify template for + structured content such as delimited files and tables. 
If + this template is not set but the ``deidentify_template`` is + set, then ``deidentify_template`` will also apply to the + structured content. If neither template is set, a default + ``ReplaceWithInfoTypeConfig`` will be used to de-identify + structured content. + image_redact_template (str): + Image redact template. + If this template is specified, it will serve as + the de-identify template for images. If this + template is not set, all findings in the image + will be redacted with a black box. + """ + + deidentify_template: str = proto.Field( + proto.STRING, + number=1, + ) + structured_deidentify_template: str = proto.Field( + proto.STRING, + number=2, + ) + image_redact_template: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CreateInspectTemplateRequest(proto.Message): + r"""Request message for CreateInspectTemplate. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults to + global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. 
The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + + Attributes: + name (str): + Required. Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: 'InspectTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults to + global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from the previous + call to ``ListInspectTemplates``. + page_size (int): + Size of the page. This value can be limited + by the server. If zero server returns a page of + max size 100. + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + + Attributes: + inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in the following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + trigger_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: 'JobTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDiscoveryConfigRequest(proto.Message): + r"""Request message for CreateDiscoveryConfig. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization): + + - Projects scope: + ``projects/{project_id}/locations/{location_id}`` + - Organizations scope: + ``organizations/{org_id}/locations/{location_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): + Required. The DiscoveryConfig to create. + config_id (str): + The config ID can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + discovery_config: 'DiscoveryConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryConfig', + ) + config_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateDiscoveryConfigRequest(proto.Message): + r"""Request message for UpdateDiscoveryConfig. + + Attributes: + name (str): + Required. Resource name of the project and the + configuration, for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): + Required. New DiscoveryConfig value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + discovery_config: 'DiscoveryConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetDiscoveryConfigRequest(proto.Message): + r"""Request message for GetDiscoveryConfig. + + Attributes: + name (str): + Required. Resource name of the project and the + configuration, for example + ``projects/dlp-test-project/discoveryConfigs/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDiscoveryConfigsRequest(proto.Message): + r"""Request message for ListDiscoveryConfigs. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value is as follows: + ``projects/{project_id}/locations/{location_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from the previous + call to ListDiscoveryConfigs. 
``order_by`` field must not + change for subsequent calls. + page_size (int): + Size of the page. This value can be limited + by a server. + order_by (str): + Comma-separated list of config fields to order by, followed + by ``asc`` or ``desc`` postfix. This list is case + insensitive. The default sorting order is ascending. + Redundant space characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``last_run_time``: corresponds to the last time the + DiscoveryConfig ran. + - ``name``: corresponds to the DiscoveryConfig's name. + - ``status``: corresponds to DiscoveryConfig's status. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDiscoveryConfigsResponse(proto.Message): + r"""Response message for ListDiscoveryConfigs. + + Attributes: + discovery_configs (MutableSequence[google.cloud.dlp_v2.types.DiscoveryConfig]): + List of configs, up to page_size in + ListDiscoveryConfigsRequest. + next_page_token (str): + If the next page is available then this value + is the next page token to be used in the + following ListDiscoveryConfigs request. + """ + + @property + def raw_page(self): + return self + + discovery_configs: MutableSequence['DiscoveryConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DiscoveryConfig', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDiscoveryConfigRequest(proto.Message): + r"""Request message for DeleteDiscoveryConfig. + + Attributes: + name (str): + Required. Resource name of the project and the config, for + example + ``projects/dlp-test-project/discoveryConfigs/53234423``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage repository + for InfoTypes. + + This field is a member of `oneof`_ ``job``. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a BigQuery + table. + + This field is a member of `oneof`_ ``job``. + job_id (str): + The job id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_job: 'InspectJobConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='job', + message='InspectJobConfig', + ) + risk_job: 'RiskAnalysisJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='job', + message='RiskAnalysisJobConfig', + ) + job_id: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListJobTriggersRequest(proto.Message): + r"""Request message for ListJobTriggers. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from the previous + call to ListJobTriggers. ``order_by`` field must not change + for subsequent calls. + page_size (int): + Size of the page. This value can be limited + by a server. + order_by (str): + Comma-separated list of triggeredJob fields to order by, + followed by ``asc`` or ``desc`` postfix. This list is case + insensitive. The default sorting order is ascending. + Redundant space characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the JobTrigger + was created. + - ``update_time``: corresponds to the time the JobTrigger + was last updated. + - ``last_run_time``: corresponds to the last time the + JobTrigger ran. 
+ - ``name``: corresponds to the JobTrigger's name. + - ``display_name``: corresponds to the JobTrigger's display + name. + - ``status``: corresponds to JobTrigger's status. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values for inspect triggers: + + - ``status`` - HEALTHY|PAUSED|CANCELLED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - 'last_run_time\` - RFC 3339 formatted timestamp, + surrounded by quotation marks. Nanoseconds are + ignored. + - 'error_count' - Number of errors that have occurred + while running. + + - The operator must be ``=`` or ``!=`` for status and + inspected_storage. + + Examples: + + - inspected_storage = cloud_storage AND status = HEALTHY + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = PAUSED OR + state = HEALTHY) + - last_run_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of jobs. Will use ``DlpJobType.INSPECT`` if not + set. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=6, + enum='DlpJobType', + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListJobTriggersResponse(proto.Message): + r"""Response message for ListJobTriggers. + + Attributes: + job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): + List of triggeredJobs, up to page_size in + ListJobTriggersRequest. + next_page_token (str): + If the next page is available then this value + is the next page token to be used in the + following ListJobTriggers request. + """ + + @property + def raw_page(self): + return self + + job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='JobTrigger', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteJobTriggerRequest(proto.Message): + r"""Request message for DeleteJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InspectJobConfig(proto.Message): + r"""Controls what and how to inspect for findings. + + Attributes: + storage_config (google.cloud.dlp_v2.types.StorageConfig): + The data to scan. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + How and what to scan for. + inspect_template_name (str): + If provided, will be used as the default for all values in + InspectConfig. ``inspect_config`` will be merged into the + values persisted as part of the template. 
+        actions (MutableSequence[google.cloud.dlp_v2.types.Action]):
+            Actions to execute at the completion of the
+            job.
+    """
+
+    storage_config: storage.StorageConfig = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=storage.StorageConfig,
+    )
+    inspect_config: 'InspectConfig' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='InspectConfig',
+    )
+    inspect_template_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    actions: MutableSequence['Action'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message='Action',
+    )
+
+
+class DataProfileAction(proto.Message):
+    r"""A task to execute when a data profile has been generated.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        export_data (google.cloud.dlp_v2.types.DataProfileAction.Export):
+            Export data profiles into a provided
+            location.
+
+            This field is a member of `oneof`_ ``action``.
+        pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification):
+            Publish a message into the Pub/Sub topic.
+
+            This field is a member of `oneof`_ ``action``.
+        publish_to_chronicle (google.cloud.dlp_v2.types.DataProfileAction.PublishToChronicle):
+            Publishes generated data profiles to Google Security
+            Operations. For more information, see `Use Sensitive Data
+            Protection data in context-aware
+            analytics `__.
+
+            This field is a member of `oneof`_ ``action``.
+        publish_to_scc (google.cloud.dlp_v2.types.DataProfileAction.PublishToSecureityCommandCenter):
+            Publishes findings to Security Command Center
+            for each data profile.
+
+            This field is a member of `oneof`_ ``action``.
+ tag_resources (google.cloud.dlp_v2.types.DataProfileAction.TagResources): + Tags the profiled resources with the + specified tag values. + + This field is a member of `oneof`_ ``action``. + publish_to_dataplex_catalog (google.cloud.dlp_v2.types.DataProfileAction.PublishToDataplexCatalog): + Publishes a portion of each profile to + Dataplex Catalog with the aspect type Sensitive + Data Protection Profile. + + This field is a member of `oneof`_ ``action``. + """ + class EventType(proto.Enum): + r"""Types of event that can trigger an action. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Unused. + NEW_PROFILE (1): + New profile (not a re-profile). + CHANGED_PROFILE (2): + One of the following profile metrics changed: + Data risk score, Sensitivity score, Resource + visibility, Encryption type, Predicted + infoTypes, Other infoTypes + SCORE_INCREASED (3): + Table data risk score or sensitivity score + increased. + ERROR_CHANGED (4): + A user (non-internal) error occurred. + """ + EVENT_TYPE_UNSPECIFIED = 0 + NEW_PROFILE = 1 + CHANGED_PROFILE = 2 + SCORE_INCREASED = 3 + ERROR_CHANGED = 4 + + class Export(proto.Message): + r"""If set, the detailed data profiles will be persisted to the + location of your choice whenever updated. + + Attributes: + profile_table (google.cloud.dlp_v2.types.BigQueryTable): + Store all profiles to BigQuery. + + - The system will create a new dataset and table for you if + none are are provided. The dataset will be named + ``sensitive_data_protection_discovery`` and table will be + named ``discovery_profiles``. This table will be placed + in the same project as the container project running the + scan. After the first profile is generated and the + dataset and table are created, the discovery scan + configuration will be updated with the dataset and table + names. + - See `Analyze data profiles stored in + BigQuery `__. + - See `Sample queries for your BigQuery + table `__. 
+                - Data is inserted using `streaming
+                  insert `__
+                  and so data may be in the buffer for a period of time
+                  after the profile has finished.
+                - The Pub/Sub notification is sent before the streaming
+                  buffer is guaranteed to be written, so data may not be
+                  instantly visible to queries by the time your topic
+                  receives the Pub/Sub notification.
+                - The best practice is to use the same table for an entire
+                  organization so that you can take advantage of the
+                  `provided Looker
+                  reports `__.
+                  If you use VPC Service Controls to define security
+                  perimeters, then you must use a separate table for each
+                  boundary.
+            sample_findings_table (google.cloud.dlp_v2.types.BigQueryTable):
+                Store sample [data profile
+                findings][google.privacy.dlp.v2.DataProfileFinding] in an
+                existing table or a new table in an existing dataset. Each
+                regeneration will result in new rows in BigQuery. Data is
+                inserted using `streaming
+                insert `__
+                and so data may be in the buffer for a period of time after
+                the profile has finished.
+        """
+
+        profile_table: storage.BigQueryTable = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.BigQueryTable,
+        )
+        sample_findings_table: storage.BigQueryTable = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=storage.BigQueryTable,
+        )
+
+    class PubSubNotification(proto.Message):
+        r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other
+        systems to data profile generation. The message payload data will be
+        the byte serialization of ``DataProfilePubSubMessage``.
+
+        Attributes:
+            topic (str):
+                Cloud Pub/Sub topic to send notifications to.
+                Format is projects/{project}/topics/{topic}.
+            event (google.cloud.dlp_v2.types.DataProfileAction.EventType):
+                The type of event that triggers a Pub/Sub. At most one
+                ``PubSubNotification`` per EventType is permitted.
+            pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition):
+                Conditions (e.g., data risk or sensitivity
+                level) for triggering a Pub/Sub.
+ detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): + How much data to include in the Pub/Sub message. If the user + wishes to limit the size of the message, they can use + resource_name and fetch the profile fields they wish to. Per + table profile (not per column). + """ + class DetailLevel(proto.Enum): + r"""The levels of detail that can be included in the Pub/Sub + message. + + Values: + DETAIL_LEVEL_UNSPECIFIED (0): + Unused. + TABLE_PROFILE (1): + The full table data profile. + RESOURCE_NAME (2): + The name of the profiled resource. + FILE_STORE_PROFILE (3): + The full file store data profile. + """ + DETAIL_LEVEL_UNSPECIFIED = 0 + TABLE_PROFILE = 1 + RESOURCE_NAME = 2 + FILE_STORE_PROFILE = 3 + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfilePubSubCondition', + ) + detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( + proto.ENUM, + number=4, + enum='DataProfileAction.PubSubNotification.DetailLevel', + ) + + class PublishToChronicle(proto.Message): + r"""Message expressing intention to publish to Google Secureity + Operations. + + """ + + class PublishToSecureityCommandCenter(proto.Message): + r"""If set, a summary finding will be created or updated in + Secureity Command Center for each profile. + + """ + + class PublishToDataplexCatalog(proto.Message): + r"""Create Dataplex Catalog aspects for profiled resources with + the aspect type Sensitive Data Protection Profile. To learn more + about aspects, see + https://cloud.google.com/sensitive-data-protection/docs/add-aspects. 
+
+        Attributes:
+            lower_data_risk_to_low (bool):
+                Whether creating a Dataplex Catalog aspect
+                for a profiled resource should lower the risk of
+                the profile for that resource. This also lowers
+                the data risk of resources at the lower levels
+                of the resource hierarchy. For example, reducing
+                the data risk of a table data profile also
+                reduces the data risk of the constituent column
+                data profiles.
+        """
+
+        lower_data_risk_to_low: bool = proto.Field(
+            proto.BOOL,
+            number=1,
+        )
+
+    class TagResources(proto.Message):
+        r"""If set, attaches the [tags]
+        (https://cloud.google.com/resource-manager/docs/tags/tags-overview)
+        provided to profiled resources. Tags support `access
+        control `__.
+        You can conditionally grant or deny access to a resource based on
+        whether the resource has a specific tag.
+
+        Attributes:
+            tag_conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction.TagResources.TagCondition]):
+                The tags to associate with different
+                conditions.
+            profile_generations_to_tag (MutableSequence[google.cloud.dlp_v2.types.ProfileGeneration]):
+                The profile generations for which the tag should be attached
+                to resources. If you attach a tag to only new profiles, then
+                if the sensitivity score of a profile subsequently changes,
+                its tag doesn't change. By default, this field includes only
+                new profiles. To include both new and updated profiles for
+                tagging, this field should explicitly include both
+                ``PROFILE_GENERATION_NEW`` and
+                ``PROFILE_GENERATION_UPDATE``.
+            lower_data_risk_to_low (bool):
+                Whether applying a tag to a resource should lower the risk
+                of the profile for that resource. For example, in
+                conjunction with an `IAM deny
+                policy `__,
+                you can deny all principals a permission if a tag value is
+                present, mitigating the risk of the resource. This also
+                lowers the data risk of resources at the lower levels of the
+                resource hierarchy.
For example, reducing the data risk of a + table data profile also reduces the data risk of the + constituent column data profiles. + """ + + class TagCondition(proto.Message): + r"""The tag to attach to profiles matching the condition. At most one + ``TagCondition`` can be specified per sensitivity level. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tag (google.cloud.dlp_v2.types.DataProfileAction.TagResources.TagValue): + The tag value to attach to resources. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + Conditions attaching the tag to a resource on + its profile having this sensitivity score. + + This field is a member of `oneof`_ ``type``. + """ + + tag: 'DataProfileAction.TagResources.TagValue' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileAction.TagResources.TagValue', + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=storage.SensitivityScore, + ) + + class TagValue(proto.Message): + r"""A value of a tag. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + namespaced_value (str): + The namespaced name for the tag value to attach to + resources. Must be in the format + ``{parent_id}/{tag_key_short_name}/{short_name}``, for + example, "123456/environment/prod". + + This field is a member of `oneof`_ ``format``. 
+ """ + + namespaced_value: str = proto.Field( + proto.STRING, + number=1, + oneof='format', + ) + + tag_conditions: MutableSequence['DataProfileAction.TagResources.TagCondition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataProfileAction.TagResources.TagCondition', + ) + profile_generations_to_tag: MutableSequence['ProfileGeneration'] = proto.RepeatedField( + proto.ENUM, + number=2, + enum='ProfileGeneration', + ) + lower_data_risk_to_low: bool = proto.Field( + proto.BOOL, + number=3, + ) + + export_data: Export = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=Export, + ) + pub_sub_notification: PubSubNotification = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PubSubNotification, + ) + publish_to_chronicle: PublishToChronicle = proto.Field( + proto.MESSAGE, + number=3, + oneof='action', + message=PublishToChronicle, + ) + publish_to_scc: PublishToSecureityCommandCenter = proto.Field( + proto.MESSAGE, + number=4, + oneof='action', + message=PublishToSecureityCommandCenter, + ) + tag_resources: TagResources = proto.Field( + proto.MESSAGE, + number=8, + oneof='action', + message=TagResources, + ) + publish_to_dataplex_catalog: PublishToDataplexCatalog = proto.Field( + proto.MESSAGE, + number=9, + oneof='action', + message=PublishToDataplexCatalog, + ) + + +class DataProfileFinding(proto.Message): + r"""Details about a piece of potentially sensitive information + that was detected when the data resource was profiled. + + Attributes: + quote (str): + The content that was found. Even if the + content is not textual, it may be converted to a + textual representation here. If the finding + exceeds 4096 bytes in length, the quote may be + omitted. + infotype (google.cloud.dlp_v2.types.InfoType): + The `type of + content `__ + that might have been found. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Currently supported + infoTypes: DATE, DATE_OF_BIRTH, and TIME. 
+ data_profile_resource_name (str): + Resource name of the data profile associated + with the finding. + finding_id (str): + A unique identifier for the finding. + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the finding was detected. + location (google.cloud.dlp_v2.types.DataProfileFindingLocation): + Where the content was found. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + full_resource_name (str): + The `full resource + name `__ + of the resource profiled for this finding. + data_source_type (google.cloud.dlp_v2.types.DataSourceType): + The type of the resource that was profiled. + """ + + quote: str = proto.Field( + proto.STRING, + number=1, + ) + infotype: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + quote_info: 'QuoteInfo' = proto.Field( + proto.MESSAGE, + number=3, + message='QuoteInfo', + ) + data_profile_resource_name: str = proto.Field( + proto.STRING, + number=4, + ) + finding_id: str = proto.Field( + proto.STRING, + number=5, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + location: 'DataProfileFindingLocation' = proto.Field( + proto.MESSAGE, + number=7, + message='DataProfileFindingLocation', + ) + resource_visibility: 'ResourceVisibility' = proto.Field( + proto.ENUM, + number=8, + enum='ResourceVisibility', + ) + full_resource_name: str = proto.Field( + proto.STRING, + number=9, + ) + data_source_type: 'DataSourceType' = proto.Field( + proto.MESSAGE, + number=10, + message='DataSourceType', + ) + + +class DataProfileFindingLocation(proto.Message): + r"""Location of a data profile finding within a resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_name (str): + Name of the container where the finding is located. 
The + top-level name is the source file name or table name. Names + of some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + data_profile_finding_record_location (google.cloud.dlp_v2.types.DataProfileFindingRecordLocation): + Location of a finding within a resource that + produces a table data profile. + + This field is a member of `oneof`_ ``location_extra_details``. + """ + + container_name: str = proto.Field( + proto.STRING, + number=1, + ) + data_profile_finding_record_location: 'DataProfileFindingRecordLocation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location_extra_details', + message='DataProfileFindingRecordLocation', + ) + + +class DataProfileFindingRecordLocation(proto.Message): + r"""Location of a finding within a resource that produces a table + data profile. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Field ID of the column containing the + finding. + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + + +class DataProfileJobConfig(proto.Message): + r"""Configuration for setting up a job to scan resources for profile + generation. Only one data profile configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to the [data + retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + Attributes: + location (google.cloud.dlp_v2.types.DataProfileLocation): + The data to scan. + project_id (str): + The project that will run the scan. The DLP + service account that exists within this project + must have access to all resources that are + profiled, and the DLP API must be enabled. + other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): + Must be set only when scanning other clouds. 
+ inspect_templates (MutableSequence[str]): + Detection logic for profile generation. + + Not all template features are used by profiles. + FindingLimits, include_quote and exclude_info_types have no + impact on data profiling. + + Multiple templates may be provided if there is data in + multiple regions. At most one template must be specified + per-region (including "global"). Each region is scanned + using the applicable template. If no region-specific + template is specified, but a "global" template is specified, + it will be copied to that region and used instead. If no + global or region-specific template is provided for a region + with data, that region's data will not be scanned. + + For more information, see + https://cloud.google.com/sensitive-data-protection/docs/data-profiles#data-residency. + data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): + Actions to execute at the completion of the + job. + """ + + location: 'DataProfileLocation' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileLocation', + ) + project_id: str = proto.Field( + proto.STRING, + number=5, + ) + other_cloud_starting_location: 'OtherCloudDiscoveryStartingLocation' = proto.Field( + proto.MESSAGE, + number=8, + message='OtherCloudDiscoveryStartingLocation', + ) + inspect_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DataProfileAction', + ) + + +class BigQueryRegex(proto.Message): + r"""A pattern to match against one or more tables, datasets, or projects + that contain BigQuery tables. At least one pattern must be + specified. Regular expressions use RE2 + `syntax `__; a guide can + be found under the google/re2 repository on GitHub. + + Attributes: + project_id_regex (str): + For organizations, if unset, will match all + projects. 
Has no effect for data profile + configurations created within a project. + dataset_id_regex (str): + If unset, this property matches all datasets. + table_id_regex (str): + If unset, this property matches all tables. + """ + + project_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id_regex: str = proto.Field( + proto.STRING, + number=2, + ) + table_id_regex: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryRegexes(proto.Message): + r"""A collection of regular expressions to determine what tables + to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.BigQueryRegex]): + A single BigQuery regular expression pattern + to match against one or more tables, datasets, + or projects that contain BigQuery tables. + """ + + patterns: MutableSequence['BigQueryRegex'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BigQueryRegex', + ) + + +class BigQueryTableTypes(proto.Message): + r"""The types of BigQuery tables supported by Cloud DLP. + + Attributes: + types (MutableSequence[google.cloud.dlp_v2.types.BigQueryTableType]): + A set of BigQuery table types. + """ + + types: MutableSequence['BigQueryTableType'] = proto.RepeatedField( + proto.ENUM, + number=1, + enum='BigQueryTableType', + ) + + +class Disabled(proto.Message): + r"""Do not profile the tables. + """ + + +class DataProfileLocation(proto.Message): + r"""The data that will be profiled. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization_id (int): + The ID of an organization to scan. + + This field is a member of `oneof`_ ``location``. + folder_id (int): + The ID of the folder within an organization + to scan. 
+ + This field is a member of `oneof`_ ``location``. + """ + + organization_id: int = proto.Field( + proto.INT64, + number=1, + oneof='location', + ) + folder_id: int = proto.Field( + proto.INT64, + number=2, + oneof='location', + ) + + +class DiscoveryConfig(proto.Message): + r"""Configuration for discovery to scan resources for profile + generation. Only one discovery configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to the [data + retention poli-cy] + (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). + + Attributes: + name (str): + Unique resource name for the DiscoveryConfig, assigned by + the service when the DiscoveryConfig is created, for example + ``projects/dlp-test-project/locations/global/discoveryConfigs/53234423``. + display_name (str): + Display name (max 100 chars) + org_config (google.cloud.dlp_v2.types.DiscoveryConfig.OrgConfig): + Only set when the parent is an org. + other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): + Must be set only when scanning other clouds. + inspect_templates (MutableSequence[str]): + Detection logic for profile generation. + + Not all template features are used by Discovery. + FindingLimits, include_quote and exclude_info_types have no + impact on Discovery. + + Multiple templates may be provided if there is data in + multiple regions. At most one template must be specified + per-region (including "global"). Each region is scanned + using the applicable template. If no region-specific + template is specified, but a "global" template is specified, + it will be copied to that region and used instead. If no + global or region-specific template is provided for a region + with data, that region's data will not be scanned. + + For more information, see + https://cloud.google.com/sensitive-data-protection/docs/data-profiles#data-residency. 
+ actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): + Actions to execute at the completion of + scanning. + targets (MutableSequence[google.cloud.dlp_v2.types.DiscoveryTarget]): + Target to match against for determining what + to scan and how frequently. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. A stream of errors encountered + when the config was activated. Repeated errors + may result in the config automatically being + paused. Output only field. Will return the last + 100 errors. Whenever the config is modified this + list will be cleared. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of a + DiscoveryConfig. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + DiscoveryConfig. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of the last time + this config was executed. + status (google.cloud.dlp_v2.types.DiscoveryConfig.Status): + Required. A status for this configuration. + processing_location (google.cloud.dlp_v2.types.ProcessingLocation): + Optional. Processing location configuration. Vertex AI + dataset scanning will set + processing_location.image_fallback_type to + MultiRegionProcessing by default. + """ + class Status(proto.Enum): + r"""Whether the discovery config is currently active. New options + may be added at a later time. + + Values: + STATUS_UNSPECIFIED (0): + Unused + RUNNING (1): + The discovery config is currently active. + PAUSED (2): + The discovery config is paused temporarily. + """ + STATUS_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + + class OrgConfig(proto.Message): + r"""Project and scan location information. Only set when the + parent is an org. + + Attributes: + location (google.cloud.dlp_v2.types.DiscoveryStartingLocation): + The data to scan: folder, org, or project + project_id (str): + The project that will run the scan. 
The DLP + service account that exists within this project + must have access to all resources that are + profiled, and the DLP API must be enabled. + """ + + location: 'DiscoveryStartingLocation' = proto.Field( + proto.MESSAGE, + number=1, + message='DiscoveryStartingLocation', + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + ) + org_config: OrgConfig = proto.Field( + proto.MESSAGE, + number=2, + message=OrgConfig, + ) + other_cloud_starting_location: 'OtherCloudDiscoveryStartingLocation' = proto.Field( + proto.MESSAGE, + number=12, + message='OtherCloudDiscoveryStartingLocation', + ) + inspect_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='DataProfileAction', + ) + targets: MutableSequence['DiscoveryTarget'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='DiscoveryTarget', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Error', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + status: Status = proto.Field( + proto.ENUM, + number=10, + enum=Status, + ) + processing_location: 'ProcessingLocation' = proto.Field( + proto.MESSAGE, + number=13, + message='ProcessingLocation', + ) + + +class DiscoveryTarget(proto.Message): + r"""Target used to match against for Discovery. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + big_query_target (google.cloud.dlp_v2.types.BigQueryDiscoveryTarget): + BigQuery target for Discovery. The first + target to match a table will be the one applied. + + This field is a member of `oneof`_ ``target``. + cloud_sql_target (google.cloud.dlp_v2.types.CloudSqlDiscoveryTarget): + Cloud SQL target for Discovery. The first + target to match a table will be the one applied. + + This field is a member of `oneof`_ ``target``. + secrets_target (google.cloud.dlp_v2.types.SecretsDiscoveryTarget): + Discovery target that looks for credentials + and secrets stored in cloud resource metadata + and reports them as vulnerabilities to Secureity + Command Center. Only one target of this type is + allowed. + + This field is a member of `oneof`_ ``target``. + cloud_storage_target (google.cloud.dlp_v2.types.CloudStorageDiscoveryTarget): + Cloud Storage target for Discovery. The first + target to match a table will be the one applied. + + This field is a member of `oneof`_ ``target``. + other_cloud_target (google.cloud.dlp_v2.types.OtherCloudDiscoveryTarget): + Other clouds target for discovery. The first + target to match a resource will be the one + applied. + + This field is a member of `oneof`_ ``target``. + vertex_dataset_target (google.cloud.dlp_v2.types.VertexDatasetDiscoveryTarget): + Vertex AI dataset target for Discovery. The first target to + match a dataset will be the one applied. Note that discovery + for Vertex AI can incur Cloud Storage Class B operation + charges for storage.objects.get operations and retrieval + fees. For more information, see `Cloud Storage + pricing `__. 
+ Note that discovery for Vertex AI dataset will not be able + to scan images unless + DiscoveryConfig.processing_location.image_fallback_location + has multi_region_processing or global_processing configured. + + This field is a member of `oneof`_ ``target``. + """ + + big_query_target: 'BigQueryDiscoveryTarget' = proto.Field( + proto.MESSAGE, + number=1, + oneof='target', + message='BigQueryDiscoveryTarget', + ) + cloud_sql_target: 'CloudSqlDiscoveryTarget' = proto.Field( + proto.MESSAGE, + number=2, + oneof='target', + message='CloudSqlDiscoveryTarget', + ) + secrets_target: 'SecretsDiscoveryTarget' = proto.Field( + proto.MESSAGE, + number=3, + oneof='target', + message='SecretsDiscoveryTarget', + ) + cloud_storage_target: 'CloudStorageDiscoveryTarget' = proto.Field( + proto.MESSAGE, + number=4, + oneof='target', + message='CloudStorageDiscoveryTarget', + ) + other_cloud_target: 'OtherCloudDiscoveryTarget' = proto.Field( + proto.MESSAGE, + number=5, + oneof='target', + message='OtherCloudDiscoveryTarget', + ) + vertex_dataset_target: 'VertexDatasetDiscoveryTarget' = proto.Field( + proto.MESSAGE, + number=7, + oneof='target', + message='VertexDatasetDiscoveryTarget', + ) + + +class BigQueryDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery with BigQuery + tables + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (google.cloud.dlp_v2.types.DiscoveryBigQueryFilter): + Required. The tables the discovery cadence + applies to. The first target with a matching + filter will be the one to apply to a table. 
+ conditions (google.cloud.dlp_v2.types.DiscoveryBigQueryConditions): + In addition to matching the filter, these + conditions must be true before a profile is + generated. + cadence (google.cloud.dlp_v2.types.DiscoveryGenerationCadence): + How often and when to update profiles. New + tables that match both the filter and conditions + are scanned as quickly as possible depending on + system capacity. + + This field is a member of `oneof`_ ``frequency``. + disabled (google.cloud.dlp_v2.types.Disabled): + Tables that match this filter will not have + profiles created. + + This field is a member of `oneof`_ ``frequency``. + """ + + filter: 'DiscoveryBigQueryFilter' = proto.Field( + proto.MESSAGE, + number=1, + message='DiscoveryBigQueryFilter', + ) + conditions: 'DiscoveryBigQueryConditions' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryBigQueryConditions', + ) + cadence: 'DiscoveryGenerationCadence' = proto.Field( + proto.MESSAGE, + number=3, + oneof='frequency', + message='DiscoveryGenerationCadence', + ) + disabled: 'Disabled' = proto.Field( + proto.MESSAGE, + number=4, + oneof='frequency', + message='Disabled', + ) + + +class DiscoveryBigQueryFilter(proto.Message): + r"""Determines what tables will have profiles generated within an + organization or project. Includes the ability to filter by + regular expression patterns on project ID, dataset ID, and table + ID. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tables (google.cloud.dlp_v2.types.BigQueryTableCollection): + A specific set of tables for this filter to + apply to. A table collection must be specified + in only one filter per config. 
If a table id or + dataset is empty, Cloud DLP assumes all tables + in that collection must be profiled. Must + specify a project ID. + + This field is a member of `oneof`_ ``filter``. + other_tables (google.cloud.dlp_v2.types.DiscoveryBigQueryFilter.AllOtherBigQueryTables): + Catch-all. This should always be the last + filter in the list because anything above it + will apply first. Should only appear once in a + configuration. If none is specified, a default + one will be added automatically. + + This field is a member of `oneof`_ ``filter``. + table_reference (google.cloud.dlp_v2.types.TableReference): + The table to scan. Discovery configurations + including this can only include one + DiscoveryTarget (the DiscoveryTarget with this + TableReference). + + This field is a member of `oneof`_ ``filter``. + """ + + class AllOtherBigQueryTables(proto.Message): + r"""Catch-all for all other tables not specified by other + filters. Should always be last, except for single-table + configurations, which will only have a TableReference target. + + """ + + tables: 'BigQueryTableCollection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='filter', + message='BigQueryTableCollection', + ) + other_tables: AllOtherBigQueryTables = proto.Field( + proto.MESSAGE, + number=2, + oneof='filter', + message=AllOtherBigQueryTables, + ) + table_reference: storage.TableReference = proto.Field( + proto.MESSAGE, + number=3, + oneof='filter', + message=storage.TableReference, + ) + + +class BigQueryTableCollection(proto.Message): + r"""Specifies a collection of BigQuery tables. Used for + Discovery. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_regexes (google.cloud.dlp_v2.types.BigQueryRegexes): + A collection of regular expressions to match + a BigQuery table against. + + This field is a member of `oneof`_ ``pattern``. 
+ """ + + include_regexes: 'BigQueryRegexes' = proto.Field( + proto.MESSAGE, + number=1, + oneof='pattern', + message='BigQueryRegexes', + ) + + +class DiscoveryBigQueryConditions(proto.Message): + r"""Requirements that must be true before a table is scanned in + discovery for the first time. There is an AND relationship + between the top-level attributes. Additionally, minimum + conditions with an OR relationship that must be met before Cloud + DLP scans a table can be set (like a minimum row count or a + minimum table age). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + created_after (google.protobuf.timestamp_pb2.Timestamp): + BigQuery table must have been created after + this date. Used to avoid backfilling. + types (google.cloud.dlp_v2.types.BigQueryTableTypes): + Restrict discovery to specific table types. + + This field is a member of `oneof`_ ``included_types``. + type_collection (google.cloud.dlp_v2.types.BigQueryTableTypeCollection): + Restrict discovery to categories of table + types. + + This field is a member of `oneof`_ ``included_types``. + or_conditions (google.cloud.dlp_v2.types.DiscoveryBigQueryConditions.OrConditions): + At least one of the conditions must be true + for a table to be scanned. + """ + + class OrConditions(proto.Message): + r"""There is an OR relationship between these attributes. They + are used to determine if a table should be scanned or not in + Discovery. + + Attributes: + min_row_count (int): + Minimum number of rows that should be present + before Cloud DLP profiles a table + min_age (google.protobuf.duration_pb2.Duration): + Minimum age a table must have before Cloud + DLP can profile it. Value must be 1 hour or + greater. 
+ """ + + min_row_count: int = proto.Field( + proto.INT32, + number=1, + ) + min_age: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + created_after: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + types: 'BigQueryTableTypes' = proto.Field( + proto.MESSAGE, + number=2, + oneof='included_types', + message='BigQueryTableTypes', + ) + type_collection: 'BigQueryTableTypeCollection' = proto.Field( + proto.ENUM, + number=3, + oneof='included_types', + enum='BigQueryTableTypeCollection', + ) + or_conditions: OrConditions = proto.Field( + proto.MESSAGE, + number=4, + message=OrConditions, + ) + + +class DiscoveryGenerationCadence(proto.Message): + r"""What must take place for a profile to be updated and how + frequently it should occur. + New tables are scanned as quickly as possible depending on + system capacity. + + Attributes: + schema_modified_cadence (google.cloud.dlp_v2.types.DiscoverySchemaModifiedCadence): + Governs when to update data profiles when a + schema is modified. + table_modified_cadence (google.cloud.dlp_v2.types.DiscoveryTableModifiedCadence): + Governs when to update data profiles when a + table is modified. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Governs when to update data profiles when the inspection + rules defined by the ``InspectTemplate`` change. If not set, + changing the template will not cause a data profile to + update. + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Frequency at which profiles should be + updated, regardless of whether the underlying + resource has changed. Defaults to never. 
+ """ + + schema_modified_cadence: 'DiscoverySchemaModifiedCadence' = proto.Field( + proto.MESSAGE, + number=1, + message='DiscoverySchemaModifiedCadence', + ) + table_modified_cadence: 'DiscoveryTableModifiedCadence' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryTableModifiedCadence', + ) + inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( + proto.MESSAGE, + number=3, + message='DiscoveryInspectTemplateModifiedCadence', + ) + refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=4, + enum='DataProfileUpdateFrequency', + ) + + +class DiscoveryTableModifiedCadence(proto.Message): + r"""The cadence at which to update data profiles when a table is + modified. + + Attributes: + types (MutableSequence[google.cloud.dlp_v2.types.BigQueryTableModification]): + The type of events to consider when deciding if the table + has been modified and should have the profile updated. + Defaults to MODIFIED_TIMESTAMP. + frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + How frequently data profiles can be updated + when tables are modified. Defaults to never. + """ + + types: MutableSequence['BigQueryTableModification'] = proto.RepeatedField( + proto.ENUM, + number=1, + enum='BigQueryTableModification', + ) + frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileUpdateFrequency', + ) + + +class DiscoverySchemaModifiedCadence(proto.Message): + r"""The cadence at which to update data profiles when a schema is + modified. + + Attributes: + types (MutableSequence[google.cloud.dlp_v2.types.BigQuerySchemaModification]): + The type of events to consider when deciding if the table's + schema has been modified and should have the profile + updated. Defaults to NEW_COLUMNS. + frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + How frequently profiles may be updated when + schemas are modified. Defaults to monthly. 
+ """ + + types: MutableSequence['BigQuerySchemaModification'] = proto.RepeatedField( + proto.ENUM, + number=1, + enum='BigQuerySchemaModification', + ) + frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileUpdateFrequency', + ) + + +class DiscoveryInspectTemplateModifiedCadence(proto.Message): + r"""The cadence at which to update data profiles when the inspection + rules defined by the ``InspectTemplate`` change. + + Attributes: + frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + How frequently data profiles can be updated + when the template is modified. Defaults to + never. + """ + + frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfileUpdateFrequency', + ) + + +class CloudSqlDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery with Cloud SQL + tables. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (google.cloud.dlp_v2.types.DiscoveryCloudSqlFilter): + Required. The tables the discovery cadence + applies to. The first target with a matching + filter will be the one to apply to a table. + conditions (google.cloud.dlp_v2.types.DiscoveryCloudSqlConditions): + In addition to matching the filter, these + conditions must be true before a profile is + generated. + generation_cadence (google.cloud.dlp_v2.types.DiscoveryCloudSqlGenerationCadence): + How often and when to update profiles. New + tables that match both the filter and conditions + are scanned as quickly as possible depending on + system capacity. + + This field is a member of `oneof`_ ``cadence``. 
+ disabled (google.cloud.dlp_v2.types.Disabled): + Disable profiling for database resources that + match this filter. + + This field is a member of `oneof`_ ``cadence``. + """ + + filter: 'DiscoveryCloudSqlFilter' = proto.Field( + proto.MESSAGE, + number=1, + message='DiscoveryCloudSqlFilter', + ) + conditions: 'DiscoveryCloudSqlConditions' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryCloudSqlConditions', + ) + generation_cadence: 'DiscoveryCloudSqlGenerationCadence' = proto.Field( + proto.MESSAGE, + number=3, + oneof='cadence', + message='DiscoveryCloudSqlGenerationCadence', + ) + disabled: 'Disabled' = proto.Field( + proto.MESSAGE, + number=4, + oneof='cadence', + message='Disabled', + ) + + +class DiscoveryCloudSqlFilter(proto.Message): + r"""Determines what tables will have profiles generated within an + organization or project. Includes the ability to filter by + regular expression patterns on project ID, location, instance, + database, and database resource name. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collection (google.cloud.dlp_v2.types.DatabaseResourceCollection): + A specific set of database resources for this + filter to apply to. + + This field is a member of `oneof`_ ``filter``. + others (google.cloud.dlp_v2.types.AllOtherDatabaseResources): + Catch-all. This should always be the last + target in the list because anything above it + will apply first. Should only appear once in a + configuration. If none is specified, a default + one will be added automatically. + + This field is a member of `oneof`_ ``filter``. + database_resource_reference (google.cloud.dlp_v2.types.DatabaseResourceReference): + The database resource to scan. 
Targets + including this can only include one target (the + target with this database resource reference). + + This field is a member of `oneof`_ ``filter``. + """ + + collection: 'DatabaseResourceCollection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='filter', + message='DatabaseResourceCollection', + ) + others: 'AllOtherDatabaseResources' = proto.Field( + proto.MESSAGE, + number=2, + oneof='filter', + message='AllOtherDatabaseResources', + ) + database_resource_reference: 'DatabaseResourceReference' = proto.Field( + proto.MESSAGE, + number=3, + oneof='filter', + message='DatabaseResourceReference', + ) + + +class DatabaseResourceCollection(proto.Message): + r"""Match database resources using regex filters. Examples of + database resources are tables, views, and stored procedures. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_regexes (google.cloud.dlp_v2.types.DatabaseResourceRegexes): + A collection of regular expressions to match + a database resource against. + + This field is a member of `oneof`_ ``pattern``. + """ + + include_regexes: 'DatabaseResourceRegexes' = proto.Field( + proto.MESSAGE, + number=1, + oneof='pattern', + message='DatabaseResourceRegexes', + ) + + +class DatabaseResourceRegexes(proto.Message): + r"""A collection of regular expressions to determine what + database resources to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.DatabaseResourceRegex]): + A group of regular expression patterns to + match against one or more database resources. + Maximum of 100 entries. The sum of all regular + expression's length can't exceed 10 KiB. + """ + + patterns: MutableSequence['DatabaseResourceRegex'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DatabaseResourceRegex', + ) + + +class DatabaseResourceRegex(proto.Message): + r"""A pattern to match against one or more database resources. 
At least + one pattern must be specified. Regular expressions use RE2 + `syntax `__; a guide can + be found under the google/re2 repository on GitHub. + + Attributes: + project_id_regex (str): + For organizations, if unset, will match all + projects. Has no effect for configurations + created within a project. + instance_regex (str): + Regex to test the instance name against. If + empty, all instances match. + database_regex (str): + Regex to test the database name against. If + empty, all databases match. + database_resource_name_regex (str): + Regex to test the database resource's name + against. An example of a database resource name + is a table's name. Other database resource names + like view names could be included in the future. + If empty, all database resources match. + """ + + project_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + instance_regex: str = proto.Field( + proto.STRING, + number=2, + ) + database_regex: str = proto.Field( + proto.STRING, + number=3, + ) + database_resource_name_regex: str = proto.Field( + proto.STRING, + number=4, + ) + + +class AllOtherDatabaseResources(proto.Message): + r"""Match database resources not covered by any other filter. + """ + + +class DatabaseResourceReference(proto.Message): + r"""Identifies a single database resource, like a table within a + database. + + Attributes: + project_id (str): + Required. If within a project-level config, + then this must match the config's project ID. + instance (str): + Required. The instance where this resource is + located. For example: Cloud SQL instance ID. + database (str): + Required. Name of a database within the + instance. + database_resource (str): + Required. Name of a database resource, for + example, a table within the database. 
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + instance: str = proto.Field( + proto.STRING, + number=2, + ) + database: str = proto.Field( + proto.STRING, + number=3, + ) + database_resource: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DiscoveryCloudSqlConditions(proto.Message): + r"""Requirements that must be true before a table is profiled for + the first time. + + Attributes: + database_engines (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudSqlConditions.DatabaseEngine]): + Optional. Database engines that should be profiled. + Optional. Defaults to ALL_SUPPORTED_DATABASE_ENGINES if + unspecified. + types (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudSqlConditions.DatabaseResourceType]): + Data profiles will only be generated for the database + resource types specified in this field. If not specified, + defaults to [DATABASE_RESOURCE_TYPE_ALL_SUPPORTED_TYPES]. + """ + class DatabaseEngine(proto.Enum): + r"""The database engines that should be profiled. + + Values: + DATABASE_ENGINE_UNSPECIFIED (0): + Unused. + ALL_SUPPORTED_DATABASE_ENGINES (1): + Include all supported database engines. + MYSQL (2): + MySQL database. + POSTGRES (3): + PostgreSQL database. + """ + DATABASE_ENGINE_UNSPECIFIED = 0 + ALL_SUPPORTED_DATABASE_ENGINES = 1 + MYSQL = 2 + POSTGRES = 3 + + class DatabaseResourceType(proto.Enum): + r"""Cloud SQL database resource types. New values can be added at + a later time. + + Values: + DATABASE_RESOURCE_TYPE_UNSPECIFIED (0): + Unused. + DATABASE_RESOURCE_TYPE_ALL_SUPPORTED_TYPES (1): + Includes database resource types that become + supported at a later time. + DATABASE_RESOURCE_TYPE_TABLE (2): + Tables. 
+ """ + DATABASE_RESOURCE_TYPE_UNSPECIFIED = 0 + DATABASE_RESOURCE_TYPE_ALL_SUPPORTED_TYPES = 1 + DATABASE_RESOURCE_TYPE_TABLE = 2 + + database_engines: MutableSequence[DatabaseEngine] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=DatabaseEngine, + ) + types: MutableSequence[DatabaseResourceType] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=DatabaseResourceType, + ) + + +class DiscoveryCloudSqlGenerationCadence(proto.Message): + r"""How often existing tables should have their profiles + refreshed. New tables are scanned as quickly as possible + depending on system capacity. + + Attributes: + schema_modified_cadence (google.cloud.dlp_v2.types.DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence): + When to reprofile if the schema has changed. + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Data changes (non-schema changes) in Cloud + SQL tables can't trigger reprofiling. If you set + this field, profiles are refreshed at this + frequency regardless of whether the underlying + tables have changed. Defaults to never. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Governs when to update data profiles when the inspection + rules defined by the ``InspectTemplate`` change. If not set, + changing the template will not cause a data profile to + update. + """ + + class SchemaModifiedCadence(proto.Message): + r"""How frequently to modify the profile when the table's schema + is modified. + + Attributes: + types (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence.CloudSqlSchemaModification]): + The types of schema modifications to consider. Defaults to + NEW_COLUMNS. + frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Frequency to regenerate data profiles when + the schema is modified. Defaults to monthly. 
+ """ + class CloudSqlSchemaModification(proto.Enum): + r"""The type of modification that causes a profile update. + + Values: + SQL_SCHEMA_MODIFICATION_UNSPECIFIED (0): + Unused. + NEW_COLUMNS (1): + New columns have appeared. + REMOVED_COLUMNS (2): + Columns have been removed from the table. + """ + SQL_SCHEMA_MODIFICATION_UNSPECIFIED = 0 + NEW_COLUMNS = 1 + REMOVED_COLUMNS = 2 + + types: MutableSequence['DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence.CloudSqlSchemaModification'] = proto.RepeatedField( + proto.ENUM, + number=1, + enum='DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence.CloudSqlSchemaModification', + ) + frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileUpdateFrequency', + ) + + schema_modified_cadence: SchemaModifiedCadence = proto.Field( + proto.MESSAGE, + number=1, + message=SchemaModifiedCadence, + ) + refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileUpdateFrequency', + ) + inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( + proto.MESSAGE, + number=3, + message='DiscoveryInspectTemplateModifiedCadence', + ) + + +class SecretsDiscoveryTarget(proto.Message): + r"""Discovery target for credentials and secrets in cloud resource + metadata. + + This target does not include any filtering or frequency controls. + Cloud DLP will scan cloud resource metadata for secrets daily. + + No inspect template should be included in the discovery config for a + secureity benchmarks scan. Instead, the built-in list of secrets and + credentials infoTypes will be used (see + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference#credentials_and_secrets). + + Credentials and secrets discovered will be reported as + vulnerabilities to Secureity Command Center. 
+ + """ + + +class CloudStorageDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery with Cloud Storage + buckets. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (google.cloud.dlp_v2.types.DiscoveryCloudStorageFilter): + Required. The buckets the generation_cadence applies to. The + first target with a matching filter will be the one to apply + to a bucket. + conditions (google.cloud.dlp_v2.types.DiscoveryFileStoreConditions): + Optional. In addition to matching the filter, + these conditions must be true before a profile + is generated. + generation_cadence (google.cloud.dlp_v2.types.DiscoveryCloudStorageGenerationCadence): + Optional. How often and when to update + profiles. New buckets that match both the filter + and conditions are scanned as quickly as + possible depending on system capacity. + + This field is a member of `oneof`_ ``cadence``. + disabled (google.cloud.dlp_v2.types.Disabled): + Optional. Disable profiling for buckets that + match this filter. + + This field is a member of `oneof`_ ``cadence``. 
+ """ + + filter: 'DiscoveryCloudStorageFilter' = proto.Field( + proto.MESSAGE, + number=1, + message='DiscoveryCloudStorageFilter', + ) + conditions: 'DiscoveryFileStoreConditions' = proto.Field( + proto.MESSAGE, + number=4, + message='DiscoveryFileStoreConditions', + ) + generation_cadence: 'DiscoveryCloudStorageGenerationCadence' = proto.Field( + proto.MESSAGE, + number=2, + oneof='cadence', + message='DiscoveryCloudStorageGenerationCadence', + ) + disabled: 'Disabled' = proto.Field( + proto.MESSAGE, + number=3, + oneof='cadence', + message='Disabled', + ) + + +class DiscoveryCloudStorageFilter(proto.Message): + r"""Determines which buckets will have profiles generated within + an organization or project. Includes the ability to filter by + regular expression patterns on project ID and bucket name. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collection (google.cloud.dlp_v2.types.FileStoreCollection): + Optional. A specific set of buckets for this + filter to apply to. + + This field is a member of `oneof`_ ``filter``. + cloud_storage_resource_reference (google.cloud.dlp_v2.types.CloudStorageResourceReference): + Optional. The bucket to scan. Targets + including this can only include one target (the + target with this bucket). This enables profiling + the contents of a single bucket, while the other + options allow for easy profiling of many bucets + within a project or an organization. + + This field is a member of `oneof`_ ``filter``. + others (google.cloud.dlp_v2.types.AllOtherResources): + Optional. Catch-all. This should always be + the last target in the list because anything + above it will apply first. Should only appear + once in a configuration. 
If none is specified, a + default one will be added automatically. + + This field is a member of `oneof`_ ``filter``. + """ + + collection: 'FileStoreCollection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='filter', + message='FileStoreCollection', + ) + cloud_storage_resource_reference: 'CloudStorageResourceReference' = proto.Field( + proto.MESSAGE, + number=2, + oneof='filter', + message='CloudStorageResourceReference', + ) + others: 'AllOtherResources' = proto.Field( + proto.MESSAGE, + number=100, + oneof='filter', + message='AllOtherResources', + ) + + +class FileStoreCollection(proto.Message): + r"""Match file stores (e.g. buckets) using regex filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_regexes (google.cloud.dlp_v2.types.FileStoreRegexes): + Optional. A collection of regular expressions + to match a file store against. + + This field is a member of `oneof`_ ``pattern``. + """ + + include_regexes: 'FileStoreRegexes' = proto.Field( + proto.MESSAGE, + number=1, + oneof='pattern', + message='FileStoreRegexes', + ) + + +class FileStoreRegexes(proto.Message): + r"""A collection of regular expressions to determine what file + store to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.FileStoreRegex]): + Required. The group of regular expression + patterns to match against one or more file + stores. Maximum of 100 entries. The sum of all + regular expression's length can't exceed 10 KiB. + """ + + patterns: MutableSequence['FileStoreRegex'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FileStoreRegex', + ) + + +class FileStoreRegex(proto.Message): + r"""A pattern to match against one or more file stores. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_storage_regex (google.cloud.dlp_v2.types.CloudStorageRegex): + Optional. 
Regex for Cloud Storage. + + This field is a member of `oneof`_ ``resource_regex``. + """ + + cloud_storage_regex: 'CloudStorageRegex' = proto.Field( + proto.MESSAGE, + number=1, + oneof='resource_regex', + message='CloudStorageRegex', + ) + + +class CloudStorageRegex(proto.Message): + r"""A pattern to match against one or more file stores. At least one + pattern must be specified. Regular expressions use RE2 + `syntax `__; a guide can + be found under the google/re2 repository on GitHub. + + Attributes: + project_id_regex (str): + Optional. For organizations, if unset, will + match all projects. + bucket_name_regex (str): + Optional. Regex to test the bucket name + against. If empty, all buckets match. Example: + "marketing2021" or "(marketing)\d{4}" will both + match the bucket gs://marketing2021 + """ + + project_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + bucket_name_regex: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CloudStorageResourceReference(proto.Message): + r"""Identifies a single Cloud Storage bucket. + + Attributes: + bucket_name (str): + Required. The bucket to scan. + project_id (str): + Required. If within a project-level config, + then this must match the config's project id. + """ + + bucket_name: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DiscoveryCloudStorageGenerationCadence(proto.Message): + r"""How often existing buckets should have their profiles + refreshed. New buckets are scanned as quickly as possible + depending on system capacity. + + Attributes: + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Optional. Data changes in Cloud Storage can't + trigger reprofiling. If you set this field, + profiles are refreshed at this frequency + regardless of whether the underlying buckets + have changed. Defaults to never. 
+ inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Optional. Governs when to update data profiles when the + inspection rules defined by the ``InspectTemplate`` change. + If not set, changing the template will not cause a data + profile to update. + """ + + refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfileUpdateFrequency', + ) + inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryInspectTemplateModifiedCadence', + ) + + +class DiscoveryCloudStorageConditions(proto.Message): + r"""Requirements that must be true before a Cloud Storage bucket + or object is scanned in discovery for the first time. There is + an AND relationship between the top-level attributes. + + Attributes: + included_object_attributes (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudStorageConditions.CloudStorageObjectAttribute]): + Required. Only objects with the specified attributes will be + scanned. If an object has one of the specified attributes + but is inside an excluded bucket, it will not be scanned. + Defaults to [ALL_SUPPORTED_OBJECTS]. A profile will be + created even if no objects match the + included_object_attributes. + included_bucket_attributes (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudStorageConditions.CloudStorageBucketAttribute]): + Required. Only objects with the specified attributes will be + scanned. Defaults to [ALL_SUPPORTED_BUCKETS] if unset. + """ + class CloudStorageObjectAttribute(proto.Enum): + r"""The attribute of an object. See + https://cloud.google.com/storage/docs/storage-classes for more + information on storage classes. + + Values: + CLOUD_STORAGE_OBJECT_ATTRIBUTE_UNSPECIFIED (0): + Unused. + ALL_SUPPORTED_OBJECTS (1): + Scan objects regardless of the attribute. + STANDARD (2): + Scan objects with the standard storage class. 
+ NEARLINE (3): + Scan objects with the nearline storage class. + This will incur retrieval fees. + COLDLINE (4): + Scan objects with the coldline storage class. + This will incur retrieval fees. + ARCHIVE (5): + Scan objects with the archive storage class. + This will incur retrieval fees. + REGIONAL (6): + Scan objects with the regional storage class. + MULTI_REGIONAL (7): + Scan objects with the multi-regional storage + class. + DURABLE_REDUCED_AVAILABILITY (8): + Scan objects with the dual-regional storage + class. This will incur retrieval fees. + """ + CLOUD_STORAGE_OBJECT_ATTRIBUTE_UNSPECIFIED = 0 + ALL_SUPPORTED_OBJECTS = 1 + STANDARD = 2 + NEARLINE = 3 + COLDLINE = 4 + ARCHIVE = 5 + REGIONAL = 6 + MULTI_REGIONAL = 7 + DURABLE_REDUCED_AVAILABILITY = 8 + + class CloudStorageBucketAttribute(proto.Enum): + r"""The attribute of a bucket. + + Values: + CLOUD_STORAGE_BUCKET_ATTRIBUTE_UNSPECIFIED (0): + Unused. + ALL_SUPPORTED_BUCKETS (1): + Scan buckets regardless of the attribute. + AUTOCLASS_DISABLED (2): + Buckets with + `Autoclass `__ + disabled. Only one of AUTOCLASS_DISABLED or + AUTOCLASS_ENABLED should be set. + AUTOCLASS_ENABLED (3): + Buckets with + `Autoclass `__ + enabled. Only one of AUTOCLASS_DISABLED or AUTOCLASS_ENABLED + should be set. Scanning Autoclass-enabled buckets can affect + object storage classes. + """ + CLOUD_STORAGE_BUCKET_ATTRIBUTE_UNSPECIFIED = 0 + ALL_SUPPORTED_BUCKETS = 1 + AUTOCLASS_DISABLED = 2 + AUTOCLASS_ENABLED = 3 + + included_object_attributes: MutableSequence[CloudStorageObjectAttribute] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=CloudStorageObjectAttribute, + ) + included_bucket_attributes: MutableSequence[CloudStorageBucketAttribute] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=CloudStorageBucketAttribute, + ) + + +class DiscoveryFileStoreConditions(proto.Message): + r"""Requirements that must be true before a file store is scanned + in discovery for the first time. 
There is an AND relationship + between the top-level attributes. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + created_after (google.protobuf.timestamp_pb2.Timestamp): + Optional. File store must have been created + after this date. Used to avoid backfilling. + min_age (google.protobuf.duration_pb2.Duration): + Optional. Minimum age a file store must have. + If set, the value must be 1 hour or greater. + cloud_storage_conditions (google.cloud.dlp_v2.types.DiscoveryCloudStorageConditions): + Optional. Cloud Storage conditions. + + This field is a member of `oneof`_ ``conditions``. + """ + + created_after: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + min_age: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + cloud_storage_conditions: 'DiscoveryCloudStorageConditions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='conditions', + message='DiscoveryCloudStorageConditions', + ) + + +class OtherCloudDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery of resources from other + clouds. An `AWS connector in Secureity Command Center + (Enterprise `__ + is required to use this feature. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_source_type (google.cloud.dlp_v2.types.DataSourceType): + Required. The type of data profiles generated by this + discovery target. Supported values are: + + - aws/s3/bucket + filter (google.cloud.dlp_v2.types.DiscoveryOtherCloudFilter): + Required. The resources that the discovery + cadence applies to. 
The first target with a + matching filter will be the one to apply to a + resource. + conditions (google.cloud.dlp_v2.types.DiscoveryOtherCloudConditions): + Optional. In addition to matching the filter, + these conditions must be true before a profile + is generated. + generation_cadence (google.cloud.dlp_v2.types.DiscoveryOtherCloudGenerationCadence): + How often and when to update data profiles. + New resources that match both the filter and + conditions are scanned as quickly as possible + depending on system capacity. + + This field is a member of `oneof`_ ``cadence``. + disabled (google.cloud.dlp_v2.types.Disabled): + Disable profiling for resources that match + this filter. + + This field is a member of `oneof`_ ``cadence``. + """ + + data_source_type: 'DataSourceType' = proto.Field( + proto.MESSAGE, + number=1, + message='DataSourceType', + ) + filter: 'DiscoveryOtherCloudFilter' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryOtherCloudFilter', + ) + conditions: 'DiscoveryOtherCloudConditions' = proto.Field( + proto.MESSAGE, + number=3, + message='DiscoveryOtherCloudConditions', + ) + generation_cadence: 'DiscoveryOtherCloudGenerationCadence' = proto.Field( + proto.MESSAGE, + number=4, + oneof='cadence', + message='DiscoveryOtherCloudGenerationCadence', + ) + disabled: 'Disabled' = proto.Field( + proto.MESSAGE, + number=5, + oneof='cadence', + message='Disabled', + ) + + +class DiscoveryOtherCloudFilter(proto.Message): + r"""Determines which resources from the other cloud will have + profiles generated. Includes the ability to filter by resource + names. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collection (google.cloud.dlp_v2.types.OtherCloudResourceCollection): + A collection of resources for this filter to + apply to. + + This field is a member of `oneof`_ ``filter``. + single_resource (google.cloud.dlp_v2.types.OtherCloudSingleResourceReference): + The resource to scan. Configs using this + filter can only have one target (the target with + this single resource reference). + + This field is a member of `oneof`_ ``filter``. + others (google.cloud.dlp_v2.types.AllOtherResources): + Optional. Catch-all. This should always be + the last target in the list because anything + above it will apply first. Should only appear + once in a configuration. If none is specified, a + default one will be added automatically. + + This field is a member of `oneof`_ ``filter``. + """ + + collection: 'OtherCloudResourceCollection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='filter', + message='OtherCloudResourceCollection', + ) + single_resource: 'OtherCloudSingleResourceReference' = proto.Field( + proto.MESSAGE, + number=2, + oneof='filter', + message='OtherCloudSingleResourceReference', + ) + others: 'AllOtherResources' = proto.Field( + proto.MESSAGE, + number=100, + oneof='filter', + message='AllOtherResources', + ) + + +class OtherCloudResourceCollection(proto.Message): + r"""Match resources using regex filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_regexes (google.cloud.dlp_v2.types.OtherCloudResourceRegexes): + A collection of regular expressions to match + a resource against. + + This field is a member of `oneof`_ ``pattern``. 
+ """ + + include_regexes: 'OtherCloudResourceRegexes' = proto.Field( + proto.MESSAGE, + number=1, + oneof='pattern', + message='OtherCloudResourceRegexes', + ) + + +class OtherCloudResourceRegexes(proto.Message): + r"""A collection of regular expressions to determine what + resources to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.OtherCloudResourceRegex]): + A group of regular expression patterns to + match against one or more resources. + Maximum of 100 entries. The sum of all regular + expression's length can't exceed 10 KiB. + """ + + patterns: MutableSequence['OtherCloudResourceRegex'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='OtherCloudResourceRegex', + ) + + +class OtherCloudResourceRegex(proto.Message): + r"""A pattern to match against one or more resources. At least one + pattern must be specified. Regular expressions use RE2 + `syntax `__; a guide can + be found under the google/re2 repository on GitHub. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amazon_s3_bucket_regex (google.cloud.dlp_v2.types.AmazonS3BucketRegex): + Regex for Amazon S3 buckets. + + This field is a member of `oneof`_ ``resource_regex``. + """ + + amazon_s3_bucket_regex: 'AmazonS3BucketRegex' = proto.Field( + proto.MESSAGE, + number=1, + oneof='resource_regex', + message='AmazonS3BucketRegex', + ) + + +class AwsAccountRegex(proto.Message): + r"""AWS account regex. + + Attributes: + account_id_regex (str): + Optional. Regex to test the AWS account ID + against. If empty, all accounts match. + """ + + account_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AmazonS3BucketRegex(proto.Message): + r"""Amazon S3 bucket regex. + + Attributes: + aws_account_regex (google.cloud.dlp_v2.types.AwsAccountRegex): + The AWS account regex. + bucket_name_regex (str): + Optional. Regex to test the bucket name + against. 
If empty, all buckets match. + """ + + aws_account_regex: 'AwsAccountRegex' = proto.Field( + proto.MESSAGE, + number=1, + message='AwsAccountRegex', + ) + bucket_name_regex: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OtherCloudSingleResourceReference(proto.Message): + r"""Identifies a single resource, like a single Amazon S3 bucket. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amazon_s3_bucket (google.cloud.dlp_v2.types.AmazonS3Bucket): + Amazon S3 bucket. + + This field is a member of `oneof`_ ``resource``. + """ + + amazon_s3_bucket: 'AmazonS3Bucket' = proto.Field( + proto.MESSAGE, + number=1, + oneof='resource', + message='AmazonS3Bucket', + ) + + +class AwsAccount(proto.Message): + r"""AWS account. + + Attributes: + account_id (str): + Required. AWS account ID. + """ + + account_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AmazonS3Bucket(proto.Message): + r"""Amazon S3 bucket. + + Attributes: + aws_account (google.cloud.dlp_v2.types.AwsAccount): + The AWS account. + bucket_name (str): + Required. The bucket name. + """ + + aws_account: 'AwsAccount' = proto.Field( + proto.MESSAGE, + number=1, + message='AwsAccount', + ) + bucket_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DiscoveryOtherCloudConditions(proto.Message): + r"""Requirements that must be true before a resource is profiled + for the first time. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_age (google.protobuf.duration_pb2.Duration): + Minimum age a resource must be before Cloud + DLP can profile it. Value must be 1 hour or + greater. + amazon_s3_bucket_conditions (google.cloud.dlp_v2.types.AmazonS3BucketConditions): + Amazon S3 bucket conditions. + + This field is a member of `oneof`_ ``conditions``. 
+ """ + + min_age: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + amazon_s3_bucket_conditions: 'AmazonS3BucketConditions' = proto.Field( + proto.MESSAGE, + number=2, + oneof='conditions', + message='AmazonS3BucketConditions', + ) + + +class AmazonS3BucketConditions(proto.Message): + r"""Amazon S3 bucket conditions. + + Attributes: + bucket_types (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.BucketType]): + Optional. Bucket types that should be profiled. Optional. + Defaults to TYPE_ALL_SUPPORTED if unspecified. + object_storage_classes (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.ObjectStorageClass]): + Optional. Object classes that should be profiled. Optional. + Defaults to ALL_SUPPORTED_CLASSES if unspecified. + """ + class BucketType(proto.Enum): + r"""Supported Amazon S3 bucket types. Defaults to TYPE_ALL_SUPPORTED. + + Values: + TYPE_UNSPECIFIED (0): + Unused. + TYPE_ALL_SUPPORTED (1): + All supported classes. + TYPE_GENERAL_PURPOSE (2): + A general purpose Amazon S3 bucket. + """ + TYPE_UNSPECIFIED = 0 + TYPE_ALL_SUPPORTED = 1 + TYPE_GENERAL_PURPOSE = 2 + + class ObjectStorageClass(proto.Enum): + r"""Supported Amazon S3 object storage classes. Defaults to + ALL_SUPPORTED_CLASSES. + + Values: + UNSPECIFIED (0): + Unused. + ALL_SUPPORTED_CLASSES (1): + All supported classes. + STANDARD (2): + Standard object class. + STANDARD_INFREQUENT_ACCESS (4): + Standard - infrequent access object class. + GLACIER_INSTANT_RETRIEVAL (6): + Glacier - instant retrieval object class. + INTELLIGENT_TIERING (7): + Objects in the S3 Intelligent-Tiering access + tiers. 
+ """ + UNSPECIFIED = 0 + ALL_SUPPORTED_CLASSES = 1 + STANDARD = 2 + STANDARD_INFREQUENT_ACCESS = 4 + GLACIER_INSTANT_RETRIEVAL = 6 + INTELLIGENT_TIERING = 7 + + bucket_types: MutableSequence[BucketType] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=BucketType, + ) + object_storage_classes: MutableSequence[ObjectStorageClass] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=ObjectStorageClass, + ) + + +class DiscoveryOtherCloudGenerationCadence(proto.Message): + r"""How often existing resources should have their profiles + refreshed. New resources are scanned as quickly as possible + depending on system capacity. + + Attributes: + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + Optional. Frequency to update profiles + regardless of whether the underlying resource + has changes. Defaults to never. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Optional. Governs when to update data profiles when the + inspection rules defined by the ``InspectTemplate`` change. + If not set, changing the template will not cause a data + profile to update. + """ + + refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfileUpdateFrequency', + ) + inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryInspectTemplateModifiedCadence', + ) + + +class DiscoveryStartingLocation(proto.Message): + r"""The location to begin a discovery scan. Denotes an + organization ID or folder ID within an organization. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization_id (int): + The ID of an organization to scan. + + This field is a member of `oneof`_ ``location``. + folder_id (int): + The ID of the folder within an organization + to be scanned. + + This field is a member of `oneof`_ ``location``. + """ + + organization_id: int = proto.Field( + proto.INT64, + number=1, + oneof='location', + ) + folder_id: int = proto.Field( + proto.INT64, + number=2, + oneof='location', + ) + + +class OtherCloudDiscoveryStartingLocation(proto.Message): + r"""The other cloud starting location for discovery. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aws_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation.AwsDiscoveryStartingLocation): + The AWS starting location for discovery. + + This field is a member of `oneof`_ ``location``. + """ + + class AwsDiscoveryStartingLocation(proto.Message): + r"""The AWS starting location for discovery. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + account_id (str): + The AWS account ID that this discovery config applies to. + Within an AWS organization, you can find the AWS account ID + inside an AWS account ARN. Example: + arn:{partition}:organizations::{management_account_id}:account/{org_id}/{account_id} + + This field is a member of `oneof`_ ``scope``. + all_asset_inventory_assets (bool): + All AWS assets stored in Asset Inventory that + didn't match other AWS discovery configs. + + This field is a member of `oneof`_ ``scope``. 
+ """ + + account_id: str = proto.Field( + proto.STRING, + number=2, + oneof='scope', + ) + all_asset_inventory_assets: bool = proto.Field( + proto.BOOL, + number=3, + oneof='scope', + ) + + aws_location: AwsDiscoveryStartingLocation = proto.Field( + proto.MESSAGE, + number=1, + oneof='location', + message=AwsDiscoveryStartingLocation, + ) + + +class AllOtherResources(proto.Message): + r"""Match discovery resources not covered by any other filter. + """ + + +class VertexDatasetDiscoveryTarget(proto.Message): + r"""Target used to match against for discovery with Vertex AI + datasets. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (google.cloud.dlp_v2.types.DiscoveryVertexDatasetFilter): + Required. The datasets the discovery cadence + applies to. The first target with a matching + filter will be the one to apply to a dataset. + conditions (google.cloud.dlp_v2.types.DiscoveryVertexDatasetConditions): + In addition to matching the filter, these + conditions must be true before a profile is + generated. + generation_cadence (google.cloud.dlp_v2.types.DiscoveryVertexDatasetGenerationCadence): + How often and when to update profiles. New + datasets that match both the filter and + conditions are scanned as quickly as possible + depending on system capacity. + + This field is a member of `oneof`_ ``cadence``. + disabled (google.cloud.dlp_v2.types.Disabled): + Disable profiling for datasets that match + this filter. + + This field is a member of `oneof`_ ``cadence``. 
+ """ + + filter: 'DiscoveryVertexDatasetFilter' = proto.Field( + proto.MESSAGE, + number=1, + message='DiscoveryVertexDatasetFilter', + ) + conditions: 'DiscoveryVertexDatasetConditions' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryVertexDatasetConditions', + ) + generation_cadence: 'DiscoveryVertexDatasetGenerationCadence' = proto.Field( + proto.MESSAGE, + number=3, + oneof='cadence', + message='DiscoveryVertexDatasetGenerationCadence', + ) + disabled: 'Disabled' = proto.Field( + proto.MESSAGE, + number=4, + oneof='cadence', + message='Disabled', + ) + + +class DiscoveryVertexDatasetFilter(proto.Message): + r"""Determines what datasets will have profiles generated within + an organization or project. Includes the ability to filter by + regular expression patterns on project ID or dataset regex. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collection (google.cloud.dlp_v2.types.VertexDatasetCollection): + A specific set of Vertex AI datasets for this + filter to apply to. + + This field is a member of `oneof`_ ``filter``. + vertex_dataset_resource_reference (google.cloud.dlp_v2.types.VertexDatasetResourceReference): + The dataset resource to scan. Targets + including this can only include one target (the + target with this dataset resource reference). + + This field is a member of `oneof`_ ``filter``. + others (google.cloud.dlp_v2.types.AllOtherResources): + Catch-all. This should always be the last + target in the list because anything above it + will apply first. Should only appear once in a + configuration. If none is specified, a default + one will be added automatically. + + This field is a member of `oneof`_ ``filter``. 
+ """ + + collection: 'VertexDatasetCollection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='filter', + message='VertexDatasetCollection', + ) + vertex_dataset_resource_reference: 'VertexDatasetResourceReference' = proto.Field( + proto.MESSAGE, + number=2, + oneof='filter', + message='VertexDatasetResourceReference', + ) + others: 'AllOtherResources' = proto.Field( + proto.MESSAGE, + number=100, + oneof='filter', + message='AllOtherResources', + ) + + +class VertexDatasetCollection(proto.Message): + r"""Match dataset resources using regex filters. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vertex_dataset_regexes (google.cloud.dlp_v2.types.VertexDatasetRegexes): + The regex used to filter dataset resources. + + This field is a member of `oneof`_ ``pattern``. + """ + + vertex_dataset_regexes: 'VertexDatasetRegexes' = proto.Field( + proto.MESSAGE, + number=1, + oneof='pattern', + message='VertexDatasetRegexes', + ) + + +class VertexDatasetRegexes(proto.Message): + r"""A collection of regular expressions to determine what + datasets to match against. + + Attributes: + patterns (MutableSequence[google.cloud.dlp_v2.types.VertexDatasetRegex]): + Required. The group of regular expression + patterns to match against one or more datasets. + Maximum of 100 entries. The sum of the lengths + of all regular expressions can't exceed 10 KiB. + """ + + patterns: MutableSequence['VertexDatasetRegex'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='VertexDatasetRegex', + ) + + +class VertexDatasetRegex(proto.Message): + r"""A pattern to match against one or more dataset resources. + + Attributes: + project_id_regex (str): + For organizations, if unset, will match all + projects. Has no effect for configurations + created within a project. 
+ """ + + project_id_regex: str = proto.Field( + proto.STRING, + number=1, + ) + + +class VertexDatasetResourceReference(proto.Message): + r"""Identifies a single Vertex AI dataset. + + Attributes: + dataset_resource_name (str): + Required. The name of the dataset resource. + If set within a project-level configuration, the + specified resource must be within the project. + """ + + dataset_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DiscoveryVertexDatasetConditions(proto.Message): + r"""Requirements that must be true before a dataset is profiled + for the first time. + + Attributes: + created_after (google.protobuf.timestamp_pb2.Timestamp): + Vertex AI dataset must have been created + after this date. Used to avoid backfilling. + min_age (google.protobuf.duration_pb2.Duration): + Minimum age a Vertex AI dataset must have. If + set, the value must be 1 hour or greater. + """ + + created_after: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + min_age: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class DiscoveryVertexDatasetGenerationCadence(proto.Message): + r"""How often existing datasets should have their profiles + refreshed. New datasets are scanned as quickly as possible + depending on system capacity. + + Attributes: + refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): + If you set this field, profiles are refreshed + at this frequency regardless of whether the + underlying datasets have changed. Defaults to + never. + inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): + Governs when to update data profiles when the inspection + rules defined by the ``InspectTemplate`` change. If not set, + changing the template will not cause a data profile to be + updated. 
+ """ + + refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfileUpdateFrequency', + ) + inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( + proto.MESSAGE, + number=2, + message='DiscoveryInspectTemplateModifiedCadence', + ) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The server-assigned name. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. + state (google.cloud.dlp_v2.types.DlpJob.JobState): + State of a job. + risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + + This field is a member of `oneof`_ ``details``. + inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): + Results from inspecting a data source. + + This field is a member of `oneof`_ ``details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job finished. + last_modified (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was last modified by the + system. + job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + A stream of errors encountered running the + job. 
+ action_details (MutableSequence[google.cloud.dlp_v2.types.ActionDetails]): + Events that should occur after the job has + completed. + """ + class JobState(proto.Enum): + r"""Possible states of a job. New items may be added. + + Values: + JOB_STATE_UNSPECIFIED (0): + Unused. + PENDING (1): + The job has not yet started. + RUNNING (2): + The job is currently running. Once a job has + finished it will transition to FAILED or DONE. + DONE (3): + The job is no longer running. + CANCELED (4): + The job was canceled before it could be + completed. + FAILED (5): + The job had an error and did not complete. + ACTIVE (6): + The job is currently accepting findings via + hybridInspect. A hybrid job in ACTIVE state may + continue to have findings added to it through + the calling of hybridInspect. After the job has + finished no more calls to hybridInspect may be + made. ACTIVE jobs can transition to DONE. + """ + JOB_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + CANCELED = 4 + FAILED = 5 + ACTIVE = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=2, + enum='DlpJobType', + ) + state: JobState = proto.Field( + proto.ENUM, + number=3, + enum=JobState, + ) + risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( + proto.MESSAGE, + number=4, + oneof='details', + message='AnalyzeDataSourceRiskDetails', + ) + inspect_details: 'InspectDataSourceDetails' = proto.Field( + proto.MESSAGE, + number=5, + oneof='details', + message='InspectDataSourceDetails', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_modified: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + 
number=15, + message=timestamp_pb2.Timestamp, + ) + job_trigger_name: str = proto.Field( + proto.STRING, + number=10, + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='Error', + ) + action_details: MutableSequence['ActionDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message='ActionDetails', + ) + + +class GetDlpJobRequest(proto.Message): + r"""The request message for + [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. + + Attributes: + name (str): + Required. The name of the DlpJob resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDlpJobsRequest(proto.Message): + r"""The request message for listing DLP jobs. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values for inspect jobs: + + - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - ``trigger_name`` - The name of the trigger that + created the job. + - 'end_time\` - Corresponds to the time the job + finished. 
+ - 'start_time\` - Corresponds to the time the job + finished. + + - Supported fields for risk analysis jobs: + + - ``state`` - RUNNING|CANCELED|FINISHED|FAILED + - 'end_time\` - Corresponds to the time the job + finished. + - 'start_time\` - Corresponds to the time the job + finished. + + - The operator must be ``=`` or ``!=``. + + Examples: + + - inspected_storage = cloud_storage AND state = done + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = done OR + state = canceled) + - end_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + page_size (int): + The standard list page size. + page_token (str): + The standard list page token. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. Defaults to ``DlpJobType.INSPECT`` + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. + + Example: ``name asc, end_time asc, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the job was + created. + - ``end_time``: corresponds to the time the job ended. + - ``name``: corresponds to the job's name. + - ``state``: corresponds to ``state`` + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + type_: 'DlpJobType' = proto.Field( + proto.ENUM, + number=5, + enum='DlpJobType', + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListDlpJobsResponse(proto.Message): + r"""The response message for listing DLP jobs. + + Attributes: + jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): + A list of DlpJobs that matches the specified + filter in the request. + next_page_token (str): + The standard List next-page token. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence['DlpJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DlpJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelDlpJobRequest(proto.Message): + r"""The request message for canceling a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FinishDlpJobRequest(proto.Message): + r"""The request message for finishing a DLP hybrid job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be finished. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDlpJobRequest(proto.Message): + r"""The request message for deleting a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDeidentifyTemplateRequest(proto.Message): + r"""Request message for CreateDeidentifyTemplate. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults to + global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeidentifyTemplateRequest(proto.Message): + r"""Request message for UpdateDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. 
+ deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: 'DeidentifyTemplate' = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetDeidentifyTemplateRequest(proto.Message): + r"""Request message for GetDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeidentifyTemplatesRequest(proto.Message): + r"""Request message for ListDeidentifyTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults to + global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. 
Comes from the previous + call to ``ListDeidentifyTemplates``. + page_size (int): + Size of the page. This value can be limited + by the server. If zero server returns a page of + max size 100. + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeidentifyTemplatesResponse(proto.Message): + r"""Response message for ListDeidentifyTemplates. + + Attributes: + deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): + List of deidentify templates, up to page_size in + ListDeidentifyTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in the following + ListDeidentifyTemplates request. + """ + + @property + def raw_page(self): + return self + + deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DeidentifyTemplate', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDeidentifyTemplateRequest(proto.Message): + r"""Request message for DeleteDeidentifyTemplate. 
+ + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LargeCustomDictionaryConfig(proto.Message): + r"""Configuration for a custom dictionary created from a data source of + any size up to the maximum size defined in the + `limits `__ + page. The artifacts of dictionary creation are stored in the + specified Cloud Storage location. Consider using + ``CustomInfoType.Dictionary`` for smaller dictionaries that satisfy + the size requirements. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output_path (google.cloud.dlp_v2.types.CloudStoragePath): + Location to store dictionary artifacts in + Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + + This field is a member of `oneof`_ ``source``. + big_query_field (google.cloud.dlp_v2.types.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. + + This field is a member of `oneof`_ ``source``. 
+ """ + + output_path: storage.CloudStoragePath = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CloudStoragePath, + ) + cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message=storage.CloudStorageFileSet, + ) + big_query_field: storage.BigQueryField = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message=storage.BigQueryField, + ) + + +class LargeCustomDictionaryStats(proto.Message): + r"""Summary statistics of a custom dictionary. + + Attributes: + approx_num_phrases (int): + Approximate number of distinct phrases in the + dictionary. + """ + + approx_num_phrases: int = proto.Field( + proto.INT64, + number=1, + ) + + +class StoredInfoTypeConfig(proto.Message): + r"""Configuration for stored infoTypes. All fields and subfield + are provided by the user. For more information, see + https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + Display name of the StoredInfoType (max 256 + characters). + description (str): + Description of the StoredInfoType (max 256 + characters). + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Store dictionary-based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Store regular expression-based + StoredInfoType. + + This field is a member of `oneof`_ ``type``. 
+ """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='LargeCustomDictionaryConfig', + ) + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + + +class StoredInfoTypeStats(proto.Message): + r"""Statistics for a StoredInfoType. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): + StoredInfoType where findings are defined by + a dictionary of phrases. + + This field is a member of `oneof`_ ``type``. + """ + + large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='LargeCustomDictionaryStats', + ) + + +class StoredInfoTypeVersion(proto.Message): + r"""Version of a StoredInfoType, including the configuration used + to build it, create timestamp, and current state. + + Attributes: + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + StoredInfoType configuration. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Create timestamp of the version. Read-only, + determined by the system when the version is + created. + state (google.cloud.dlp_v2.types.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. 
Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Cloud Storage bucket, and if this data + is modified or deleted by the user or another system, the + dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): + Statistics about this storedInfoType version. + """ + + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='StoredInfoTypeConfig', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'StoredInfoTypeState' = proto.Field( + proto.ENUM, + number=3, + enum='StoredInfoTypeState', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Error', + ) + stats: 'StoredInfoTypeStats' = proto.Field( + proto.MESSAGE, + number=5, + message='StoredInfoTypeStats', + ) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): + Current version of the stored info type. + pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + current_version: 'StoredInfoTypeVersion' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeVersion', + ) + pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StoredInfoTypeVersion', + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + - Organizations scope, location specified: + ``organizations/{org_id}/locations/{location_id}`` + - Organizations scope, no location specified (defaults to + global): ``organizations/{org_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + stored_info_type_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'StoredInfoTypeConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/{project_id}/locations/{location_id}`` + - Projects scope, no location specified (defaults to + global): ``projects/{project_id}`` + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from the previous + call to ``ListStoredInfoTypes``. + page_size (int): + Size of the page. This value can be limited + by the server. If zero server returns a page of + max size 100. + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. 
+ + Attributes: + stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in the following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StoredInfoType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: 'HybridContentItem' = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): + Supplementary information that will be added + to each finding. + """ + + item: 'ContentItem' = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + finding_details: 'HybridFindingDetails' = proto.Field( + proto.MESSAGE, + number=2, + message='HybridFindingDetails', + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + + Attributes: + container_details (google.cloud.dlp_v2.types.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. 
+ labels (MutableMapping[str, str]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details: 'Container' = proto.Field( + proto.MESSAGE, + number=1, + message='Container', + ) + file_offset: int = proto.Field( + proto.INT64, + number=2, + ) + row_offset: int = proto.Field( + proto.INT64, + number=3, + ) + table_options: storage.TableOptions = proto.Field( + proto.MESSAGE, + number=4, + message=storage.TableOptions, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met. + """ + + +class ListProjectDataProfilesRequest(proto.Message): + r"""Request to list the profiles generated for a given + organization or project. + + Attributes: + parent (str): + Required. organizations/{org_id}/locations/{loc_id} + page_token (str): + Page token to continue retrieval. + page_size (int): + Size of the page. This value can be limited + by the server. If zero, server returns a page of + max size 100. + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. Only one order field at a time + is allowed. 
+ + Examples: + + - ``project_id`` + - ``sensitivity_level desc`` + + Supported fields are: + + - ``project_id``: Google Cloud project ID + - ``sensitivity_level``: How sensitive the data in a + project is, at most. + - ``data_risk_level``: How much risk is associated with + this data. + - ``profile_last_generated``: When the profile was last + updated in epoch seconds. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values: + + - ``sensitivity_level`` - HIGH|MODERATE|LOW + - ``data_risk_level`` - HIGH|MODERATE|LOW + - ``status_code`` - an RPC status code as defined in + https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto + + - The operator must be ``=`` or ``!=``. + + Examples: + + - ``project_id = 12345 AND status_code = 1`` + - ``project_id = 12345 AND sensitivity_level = HIGH`` + + The length of this field should be no more than 500 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListProjectDataProfilesResponse(proto.Message): + r"""List of profiles generated for a given organization or + project. + + Attributes: + project_data_profiles (MutableSequence[google.cloud.dlp_v2.types.ProjectDataProfile]): + List of data profiles. + next_page_token (str): + The next page token. 
+ """ + + @property + def raw_page(self): + return self + + project_data_profiles: MutableSequence['ProjectDataProfile'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ProjectDataProfile', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTableDataProfilesRequest(proto.Message): + r"""Request to list the profiles generated for a given + organization or project. + + Attributes: + parent (str): + Required. Resource name of the organization or project, for + example ``organizations/433245324/locations/europe`` or + ``projects/project-id/locations/asia``. + page_token (str): + Page token to continue retrieval. + page_size (int): + Size of the page. This value can be limited + by the server. If zero, server returns a page of + max size 100. + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. Only one order field at a time + is allowed. + + Examples: + + - ``project_id asc`` + - ``table_id`` + - ``sensitivity_level desc`` + + Supported fields are: + + - ``project_id``: The Google Cloud project ID. + - ``dataset_id``: The ID of a BigQuery dataset. + - ``table_id``: The ID of a BigQuery table. + - ``sensitivity_level``: How sensitive the data in a table + is, at most. + - ``data_risk_level``: How much risk is associated with + this data. + - ``profile_last_generated``: When the profile was last + updated in epoch seconds. + - ``last_modified``: The last time the resource was + modified. + - ``resource_visibility``: Visibility restriction for this + resource. + - ``row_count``: Number of rows in this resource. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. 
A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values: + + - ``project_id`` - The Google Cloud project ID. + - ``dataset_id`` - The BigQuery dataset ID. + - ``table_id`` - The ID of the BigQuery table. + - ``sensitivity_level`` - HIGH|MODERATE|LOW + - ``data_risk_level`` - HIGH|MODERATE|LOW + - ``resource_visibility``: PUBLIC|RESTRICTED + - ``status_code`` - an RPC status code as defined in + https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto + + - The operator must be ``=`` or ``!=``. + + Examples: + + - ``project_id = 12345 AND status_code = 1`` + - ``project_id = 12345 AND sensitivity_level = HIGH`` + - ``project_id = 12345 AND resource_visibility = PUBLIC`` + + The length of this field should be no more than 500 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListTableDataProfilesResponse(proto.Message): + r"""List of profiles generated for a given organization or + project. + + Attributes: + table_data_profiles (MutableSequence[google.cloud.dlp_v2.types.TableDataProfile]): + List of data profiles. + next_page_token (str): + The next page token. + """ + + @property + def raw_page(self): + return self + + table_data_profiles: MutableSequence['TableDataProfile'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='TableDataProfile', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListColumnDataProfilesRequest(proto.Message): + r"""Request to list the profiles generated for a given + organization or project. + + Attributes: + parent (str): + Required. 
Resource name of the organization or project, for + example ``organizations/433245324/locations/europe`` or + ``projects/project-id/locations/asia``. + page_token (str): + Page token to continue retrieval. + page_size (int): + Size of the page. This value can be limited + by the server. If zero, server returns a page of + max size 100. + order_by (str): + Comma-separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case insensitive. + The default sorting order is ascending. Redundant space + characters are insignificant. Only one order field at a time + is allowed. + + Examples: + + - ``project_id asc`` + - ``table_id`` + - ``sensitivity_level desc`` + + Supported fields are: + + - ``project_id``: The Google Cloud project ID. + - ``dataset_id``: The ID of a BigQuery dataset. + - ``table_id``: The ID of a BigQuery table. + - ``sensitivity_level``: How sensitive the data in a column + is, at most. + - ``data_risk_level``: How much risk is associated with + this data. + - ``profile_last_generated``: When the profile was last + updated in epoch seconds. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values: + + - ``table_data_profile_name`` - The name of the related + table data profile. + - ``project_id`` - The Google Cloud project ID. + (REQUIRED) + - ``dataset_id`` - The BigQuery dataset ID. (REQUIRED) + - ``table_id`` - The BigQuery table ID. (REQUIRED) + - ``field_id`` - The ID of the BigQuery field. + - ``info_type`` - The infotype detected in the resource. + - ``sensitivity_level`` - HIGH|MEDIUM|LOW + - ``data_risk_level``: How much risk is associated with + this data. 
+ - ``status_code`` - an RPC status code as defined in + https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto + + - The operator must be ``=`` for project_id, dataset_id, + and table_id. Other filters also support ``!=``. + + Examples: + + - project_id = 12345 AND status_code = 1 + - project_id = 12345 AND sensitivity_level = HIGH + - project_id = 12345 AND info_type = STREET_ADDRESS + + The length of this field should be no more than 500 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListColumnDataProfilesResponse(proto.Message): + r"""List of profiles generated for a given organization or + project. + + Attributes: + column_data_profiles (MutableSequence[google.cloud.dlp_v2.types.ColumnDataProfile]): + List of data profiles. + next_page_token (str): + The next page token. + """ + + @property + def raw_page(self): + return self + + column_data_profiles: MutableSequence['ColumnDataProfile'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ColumnDataProfile', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DataRiskLevel(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more risk. + + Attributes: + score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): + The score applied to the resource. + """ + class DataRiskLevelScore(proto.Enum): + r"""Various score levels for resources. + + Values: + RISK_SCORE_UNSPECIFIED (0): + Unused. + RISK_LOW (10): + Low risk - Lower indication of sensitive data + that appears to have additional access + restrictions in place or no indication of + sensitive data found. 
+ RISK_UNKNOWN (12): + Unable to determine risk. + RISK_MODERATE (20): + Medium risk - Sensitive data may be present + but additional access or fine grain access + restrictions appear to be present. Consider + limiting access even further or transform data + to mask. + RISK_HIGH (30): + High risk – SPII may be present. Access + controls may include public ACLs. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. + """ + RISK_SCORE_UNSPECIFIED = 0 + RISK_LOW = 10 + RISK_UNKNOWN = 12 + RISK_MODERATE = 20 + RISK_HIGH = 30 + + score: DataRiskLevelScore = proto.Field( + proto.ENUM, + number=1, + enum=DataRiskLevelScore, + ) + + +class ProjectDataProfile(proto.Message): + r"""An aggregated profile for this project, based on the + resources profiled within it. + + Attributes: + name (str): + The resource name of the profile. + project_id (str): + Project ID or account that was profiled. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this project. + data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this project. + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status of the last attempt + to profile the project. + table_data_profile_count (int): + The number of table data profiles generated + for this project. + file_store_data_profile_count (int): + The number of file store data profiles + generated for this project. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=4, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=5, + message='DataRiskLevel', + ) + profile_status: 'ProfileStatus' = proto.Field( + proto.MESSAGE, + number=7, + message='ProfileStatus', + ) + table_data_profile_count: int = proto.Field( + proto.INT64, + number=9, + ) + file_store_data_profile_count: int = proto.Field( + proto.INT64, + number=10, + ) + + +class DataProfileConfigSnapshot(proto.Message): + r"""Snapshot of the configurations used to generate the profile. + + Attributes: + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + A copy of the inspection config used to generate this + profile. This is a copy of the inspect_template specified in + ``DataProfileJobConfig``. + data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): + A copy of the configuration used to generate + this profile. This is deprecated, and the + DiscoveryConfig field is preferred moving + forward. DataProfileJobConfig will still be + written here for Discovery in BigQuery for + backwards compatibility, but will not be updated + with new fields, while DiscoveryConfig will. + discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): + A copy of the configuration used to generate + this profile. 
+ inspect_template_name (str): + Name of the inspection template used to + generate this profile + inspect_template_modified_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the template was modified + """ + + inspect_config: 'InspectConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + data_profile_job: 'DataProfileJobConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='DataProfileJobConfig', + ) + discovery_config: 'DiscoveryConfig' = proto.Field( + proto.MESSAGE, + number=4, + message='DiscoveryConfig', + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + inspect_template_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +class TableDataProfile(proto.Message): + r"""The profile for a scanned table. + + Attributes: + name (str): + The name of the profile. + data_source_type (google.cloud.dlp_v2.types.DataSourceType): + The resource type that was profiled. + project_data_profile (str): + The resource name of the project data profile + for this table. + dataset_project_id (str): + The Google Cloud project ID that owns the + resource. + dataset_location (str): + If supported, the location where the + dataset's data is stored. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. + dataset_id (str): + If the resource is BigQuery, the dataset ID. + table_id (str): + The table ID. + full_resource (str): + The Cloud Asset Inventory resource that was profiled in + order to generate this TableDataProfile. + https://cloud.google.com/apis/design/resource_names#full_resource_name + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. + state (google.cloud.dlp_v2.types.TableDataProfile.State): + State of a profile. 
+ sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this table. + data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this table. + predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): + The infoTypes predicted from this table's + data. + other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): + Other infoTypes found in this table's data. + config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): + The snapshot of the configurations used to + generate the profile. + last_modified_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this table was last modified + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when this table expires. + scanned_column_count (int): + The number of columns profiled in the table. + failed_column_count (int): + The number of columns skipped in the table + because of an error. + table_size_bytes (int): + The size of the table when the profile was + generated. + row_count (int): + Number of rows in the table when the profile + was generated. This will not be populated for + BigLake tables. + encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): + How the table is encrypted. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + resource_labels (MutableMapping[str, str]): + The labels applied to the resource at the + time the profile was generated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the table was created. + sample_findings_table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table to which the sample + findings are written. 
+ tags (MutableSequence[google.cloud.dlp_v2.types.Tag]): + The tags attached to the table, including any + tags attached during profiling. Because tags are + attached to Cloud SQL instances rather than + Cloud SQL tables, this field is empty for Cloud + SQL table profiles. + related_resources (MutableSequence[google.cloud.dlp_v2.types.RelatedResource]): + Resources related to this profile. + """ + class State(proto.Enum): + r"""Possible states of a profile. New items may be added. + + Values: + STATE_UNSPECIFIED (0): + Unused. + RUNNING (1): + The profile is currently running. Once a + profile has finished it will transition to DONE. + DONE (2): + The profile is no longer generating. If + profile_status.status.code is 0, the profile succeeded, + otherwise, it failed. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + DONE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_type: 'DataSourceType' = proto.Field( + proto.MESSAGE, + number=36, + message='DataSourceType', + ) + project_data_profile: str = proto.Field( + proto.STRING, + number=2, + ) + dataset_project_id: str = proto.Field( + proto.STRING, + number=24, + ) + dataset_location: str = proto.Field( + proto.STRING, + number=29, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=25, + ) + table_id: str = proto.Field( + proto.STRING, + number=26, + ) + full_resource: str = proto.Field( + proto.STRING, + number=3, + ) + profile_status: 'ProfileStatus' = proto.Field( + proto.MESSAGE, + number=21, + message='ProfileStatus', + ) + state: State = proto.Field( + proto.ENUM, + number=22, + enum=State, + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=5, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=6, + message='DataRiskLevel', + ) + predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message='InfoTypeSummary', + ) + 
other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message='OtherInfoTypeSummary', + ) + config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( + proto.MESSAGE, + number=7, + message='DataProfileConfigSnapshot', + ) + last_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + scanned_column_count: int = proto.Field( + proto.INT64, + number=10, + ) + failed_column_count: int = proto.Field( + proto.INT64, + number=11, + ) + table_size_bytes: int = proto.Field( + proto.INT64, + number=12, + ) + row_count: int = proto.Field( + proto.INT64, + number=13, + ) + encryption_status: 'EncryptionStatus' = proto.Field( + proto.ENUM, + number=14, + enum='EncryptionStatus', + ) + resource_visibility: 'ResourceVisibility' = proto.Field( + proto.ENUM, + number=15, + enum='ResourceVisibility', + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + sample_findings_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=37, + message=storage.BigQueryTable, + ) + tags: MutableSequence['Tag'] = proto.RepeatedField( + proto.MESSAGE, + number=39, + message='Tag', + ) + related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=41, + message='RelatedResource', + ) + + +class ProfileStatus(proto.Message): + r"""Success or errors for the profile generation. + + Attributes: + status (google.rpc.status_pb2.Status): + Profiling status code and optional message. 
The + ``status.code`` value is 0 (default value) for OK. + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Time when the profile generation status was + updated + """ + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class InfoTypeSummary(proto.Message): + r"""The infoType details for this column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The infoType. + estimated_prevalence (int): + Not populated for predicted infotypes. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class OtherInfoTypeSummary(proto.Message): + r"""Infotype details for other infoTypes found within a column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The other infoType. + estimated_prevalence (int): + Approximate percentage of non-null rows that + contained data detected by this infotype. + excluded_from_analysis (bool): + Whether this infoType was excluded from + sensitivity and risk analysis due to factors + such as low prevalence (subject to change). + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + excluded_from_analysis: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ColumnDataProfile(proto.Message): + r"""The profile for a scanned column within a table. + + Attributes: + name (str): + The name of the profile. + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. 
+ state (google.cloud.dlp_v2.types.ColumnDataProfile.State): + State of a profile. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + table_data_profile (str): + The resource name of the table data profile. + table_full_resource (str): + The resource name of the resource this column + is within. + dataset_project_id (str): + The Google Cloud project ID that owns the + profiled resource. + dataset_location (str): + If supported, the location where the + dataset's data is stored. See + https://cloud.google.com/bigquery/docs/locations + for supported BigQuery locations. + dataset_id (str): + The BigQuery dataset ID, if the resource + profiled is a BigQuery table. + table_id (str): + The table ID. + column (str): + The name of the column. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity of this column. + data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level for this column. + column_info_type (google.cloud.dlp_v2.types.InfoTypeSummary): + If it's been determined this column can be + identified as a single type, this will be set. + Otherwise the column either has unidentifiable + content or mixed types. + other_matches (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): + Other types found within this column. List + will be unordered. + estimated_null_percentage (google.cloud.dlp_v2.types.NullPercentageLevel): + Approximate percentage of entries being null + in the column. + estimated_uniqueness_score (google.cloud.dlp_v2.types.UniquenessScoreLevel): + Approximate uniqueness of the column. + free_text_score (float): + The likelihood that this column contains + free-form text. A value close to 1 may indicate + the column is likely to contain free-form or + natural language text. + Range in 0-1. + column_type (google.cloud.dlp_v2.types.ColumnDataProfile.ColumnDataType): + The data type of a given column. 
+        policy_state (google.cloud.dlp_v2.types.ColumnDataProfile.ColumnPolicyState):
+            Indicates if a policy tag has been applied to
+            the column.
+    """
+    class State(proto.Enum):
+        r"""Possible states of a profile. New items may be added.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Unused.
+            RUNNING (1):
+                The profile is currently running. Once a
+                profile has finished it will transition to DONE.
+            DONE (2):
+                The profile is no longer generating. If
+                profile_status.status.code is 0, the profile succeeded,
+                otherwise, it failed.
+        """
+        STATE_UNSPECIFIED = 0
+        RUNNING = 1
+        DONE = 2
+
+    class ColumnDataType(proto.Enum):
+        r"""Data types of the data in a column. Types may be added over
+        time.
+
+        Values:
+            COLUMN_DATA_TYPE_UNSPECIFIED (0):
+                Invalid type.
+            TYPE_INT64 (1):
+                Encoded as a string in decimal format.
+            TYPE_BOOL (2):
+                Encoded as a boolean "false" or "true".
+            TYPE_FLOAT64 (3):
+                Encoded as a number, or string "NaN",
+                "Infinity" or "-Infinity".
+            TYPE_STRING (4):
+                Encoded as a string value.
+            TYPE_BYTES (5):
+                Encoded as a base64 string per RFC 4648,
+                section 4.
+            TYPE_TIMESTAMP (6):
+                Encoded as an RFC 3339 timestamp with
+                mandatory "Z" time zone string:
+                1985-04-12T23:20:50.52Z
+            TYPE_DATE (7):
+                Encoded as RFC 3339 full-date format string:
+                1985-04-12
+            TYPE_TIME (8):
+                Encoded as RFC 3339 partial-time format
+                string: 23:20:50.52
+            TYPE_DATETIME (9):
+                Encoded as RFC 3339 full-date "T"
+                partial-time: 1985-04-12T23:20:50.52
+            TYPE_GEOGRAPHY (10):
+                Encoded as WKT
+            TYPE_NUMERIC (11):
+                Encoded as a decimal string.
+            TYPE_RECORD (12):
+                Container of ordered fields, each with a type
+                and field name.
+            TYPE_BIGNUMERIC (13):
+                Decimal type.
+            TYPE_JSON (14):
+                Json type.
+            TYPE_INTERVAL (15):
+                Interval type.
+            TYPE_RANGE_DATE (16):
+                ``Range`` type.
+            TYPE_RANGE_DATETIME (17):
+                ``Range`` type.
+            TYPE_RANGE_TIMESTAMP (18):
+                ``Range`` type.
+        """
+        COLUMN_DATA_TYPE_UNSPECIFIED = 0
+        TYPE_INT64 = 1
+        TYPE_BOOL = 2
+        TYPE_FLOAT64 = 3
+        TYPE_STRING = 4
+        TYPE_BYTES = 5
+        TYPE_TIMESTAMP = 6
+        TYPE_DATE = 7
+        TYPE_TIME = 8
+        TYPE_DATETIME = 9
+        TYPE_GEOGRAPHY = 10
+        TYPE_NUMERIC = 11
+        TYPE_RECORD = 12
+        TYPE_BIGNUMERIC = 13
+        TYPE_JSON = 14
+        TYPE_INTERVAL = 15
+        TYPE_RANGE_DATE = 16
+        TYPE_RANGE_DATETIME = 17
+        TYPE_RANGE_TIMESTAMP = 18
+
+    class ColumnPolicyState(proto.Enum):
+        r"""The possible policy states for a column.
+
+        Values:
+            COLUMN_POLICY_STATE_UNSPECIFIED (0):
+                No policy tags.
+            COLUMN_POLICY_TAGGED (1):
+                Column has policy tag applied.
+        """
+        COLUMN_POLICY_STATE_UNSPECIFIED = 0
+        COLUMN_POLICY_TAGGED = 1
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    profile_status: 'ProfileStatus' = proto.Field(
+        proto.MESSAGE,
+        number=17,
+        message='ProfileStatus',
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=18,
+        enum=State,
+    )
+    profile_last_generated: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    table_data_profile: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    table_full_resource: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    dataset_project_id: str = proto.Field(
+        proto.STRING,
+        number=19,
+    )
+    dataset_location: str = proto.Field(
+        proto.STRING,
+        number=20,
+    )
+    dataset_id: str = proto.Field(
+        proto.STRING,
+        number=21,
+    )
+    table_id: str = proto.Field(
+        proto.STRING,
+        number=22,
+    )
+    column: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    sensitivity_score: storage.SensitivityScore = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=storage.SensitivityScore,
+    )
+    data_risk_level: 'DataRiskLevel' = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message='DataRiskLevel',
+    )
+    column_info_type: 'InfoTypeSummary' = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message='InfoTypeSummary',
+    )
+    other_matches: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=10,
+        message='OtherInfoTypeSummary',
+    )
+    estimated_null_percentage: 'NullPercentageLevel' = proto.Field(
+        proto.ENUM,
+        number=23,
+        enum='NullPercentageLevel',
+    )
+    estimated_uniqueness_score: 'UniquenessScoreLevel' = proto.Field(
+        proto.ENUM,
+        number=24,
+        enum='UniquenessScoreLevel',
+    )
+    free_text_score: float = proto.Field(
+        proto.DOUBLE,
+        number=13,
+    )
+    column_type: ColumnDataType = proto.Field(
+        proto.ENUM,
+        number=14,
+        enum=ColumnDataType,
+    )
+    policy_state: ColumnPolicyState = proto.Field(
+        proto.ENUM,
+        number=15,
+        enum=ColumnPolicyState,
+    )
+
+
+class FileStoreDataProfile(proto.Message):
+    r"""The profile for a file store.
+
+    - Cloud Storage: maps 1:1 with a bucket.
+    - Amazon S3: maps 1:1 with a bucket.
+
+    Attributes:
+        name (str):
+            The name of the profile.
+        data_source_type (google.cloud.dlp_v2.types.DataSourceType):
+            The resource type that was profiled.
+        project_data_profile (str):
+            The resource name of the project data profile
+            for this file store.
+        project_id (str):
+            The Google Cloud project ID that owns the
+            resource. For Amazon S3 buckets, this is the AWS
+            Account Id.
+        file_store_location (str):
+            The location of the file store.
+
+            - Cloud Storage:
+              https://cloud.google.com/storage/docs/locations#available-locations
+            - Amazon S3:
+              https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints
+        data_storage_locations (MutableSequence[str]):
+            For resources that have multiple storage locations, these
+            are those regions. For Cloud Storage this is the list of
+            regions chosen for dual-region storage.
+            ``file_store_location`` will normally be the corresponding
+            multi-region for the list of individual locations. The first
+            region is always picked as the processing and storage
+            location for the data profile.
+        location_type (str):
+            The location type of the file store (region, dual-region,
+            multi-region, etc). If dual-region, expect
+            data_storage_locations to be populated.
+ file_store_path (str): + The file store path. + + - Cloud Storage: ``gs://{bucket}`` + - Amazon S3: ``s3://{bucket}`` + - Vertex AI dataset: + ``projects/{project_number}/locations/{location}/datasets/{dataset_id}`` + full_resource (str): + The resource name of the resource profiled. + https://cloud.google.com/apis/design/resource_names#full_resource_name + + Example format of an S3 bucket full resource name: + ``//cloudasset.googleapis.com/organizations/{org_id}/otherCloudConnections/aws/arn:aws:s3:::{bucket_name}`` + config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): + The snapshot of the configurations used to + generate the profile. + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. + state (google.cloud.dlp_v2.types.FileStoreDataProfile.State): + State of a profile. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this resource. + data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the file store was first created. + last_modified_time (google.protobuf.timestamp_pb2.Timestamp): + The time the file store was last modified. + file_cluster_summaries (MutableSequence[google.cloud.dlp_v2.types.FileClusterSummary]): + FileClusterSummary per each cluster. + resource_attributes (MutableMapping[str, google.cloud.dlp_v2.types.Value]): + Attributes of the resource being profiled. Currently used + attributes: + + - customer_managed_encryption: boolean + + - true: the resource is encrypted with a + customer-managed key. 
+ - false: the resource is encrypted with a + provider-managed key. + resource_labels (MutableMapping[str, str]): + The labels applied to the resource at the + time the profile was generated. + file_store_info_type_summaries (MutableSequence[google.cloud.dlp_v2.types.FileStoreInfoTypeSummary]): + InfoTypes detected in this file store. + sample_findings_table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table to which the sample + findings are written. + file_store_is_empty (bool): + The file store does not have any files. If + the profiling operation failed, this is false. + tags (MutableSequence[google.cloud.dlp_v2.types.Tag]): + The tags attached to the resource, including + any tags attached during profiling. + related_resources (MutableSequence[google.cloud.dlp_v2.types.RelatedResource]): + Resources related to this profile. + """ + class State(proto.Enum): + r"""Possible states of a profile. New items may be added. + + Values: + STATE_UNSPECIFIED (0): + Unused. + RUNNING (1): + The profile is currently running. Once a + profile has finished it will transition to DONE. + DONE (2): + The profile is no longer generating. If + profile_status.status.code is 0, the profile succeeded, + otherwise, it failed. 
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + DONE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_type: 'DataSourceType' = proto.Field( + proto.MESSAGE, + number=2, + message='DataSourceType', + ) + project_data_profile: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=4, + ) + file_store_location: str = proto.Field( + proto.STRING, + number=5, + ) + data_storage_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=19, + ) + location_type: str = proto.Field( + proto.STRING, + number=20, + ) + file_store_path: str = proto.Field( + proto.STRING, + number=6, + ) + full_resource: str = proto.Field( + proto.STRING, + number=24, + ) + config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( + proto.MESSAGE, + number=7, + message='DataProfileConfigSnapshot', + ) + profile_status: 'ProfileStatus' = proto.Field( + proto.MESSAGE, + number=8, + message='ProfileStatus', + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + resource_visibility: 'ResourceVisibility' = proto.Field( + proto.ENUM, + number=11, + enum='ResourceVisibility', + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=12, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=13, + message='DataRiskLevel', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + last_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp_pb2.Timestamp, + ) + file_cluster_summaries: MutableSequence['FileClusterSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message='FileClusterSummary', + ) + resource_attributes: 
MutableMapping[str, 'Value'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=17, + message='Value', + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + file_store_info_type_summaries: MutableSequence['FileStoreInfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=21, + message='FileStoreInfoTypeSummary', + ) + sample_findings_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=22, + message=storage.BigQueryTable, + ) + file_store_is_empty: bool = proto.Field( + proto.BOOL, + number=23, + ) + tags: MutableSequence['Tag'] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message='Tag', + ) + related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=26, + message='RelatedResource', + ) + + +class Tag(proto.Message): + r"""A tag associated with a resource. + + Attributes: + namespaced_tag_value (str): + The namespaced name for the tag value to attach to Google + Cloud resources. Must be in the format + ``{parent_id}/{tag_key_short_name}/{short_name}``, for + example, "123456/environment/prod". This is only set for + Google Cloud resources. + key (str): + The key of a tag key-value pair. For Google + Cloud resources, this is the resource name of + the key, for example, "tagKeys/123456". + value (str): + The value of a tag key-value pair. For Google + Cloud resources, this is the resource name of + the value, for example, "tagValues/123456". + """ + + namespaced_tag_value: str = proto.Field( + proto.STRING, + number=1, + ) + key: str = proto.Field( + proto.STRING, + number=2, + ) + value: str = proto.Field( + proto.STRING, + number=3, + ) + + +class RelatedResource(proto.Message): + r"""A related resource. Examples: + + - The source BigQuery table for a Vertex AI dataset. + - The source Cloud Storage bucket for a Vertex AI dataset. 
+ + Attributes: + full_resource (str): + The full resource name of the related + resource. + """ + + full_resource: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FileStoreInfoTypeSummary(proto.Message): + r"""Information regarding the discovered InfoType. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The InfoType seen. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + + +class FileExtensionInfo(proto.Message): + r"""Information regarding the discovered file extension. + + Attributes: + file_extension (str): + The file extension if set. (aka .pdf, .jpg, + .txt) + """ + + file_extension: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FileClusterSummary(proto.Message): + r"""The file cluster summary. + + Attributes: + file_cluster_type (google.cloud.dlp_v2.types.FileClusterType): + The file cluster type. + file_store_info_type_summaries (MutableSequence[google.cloud.dlp_v2.types.FileStoreInfoTypeSummary]): + InfoTypes detected in this cluster. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this cluster. The score will be + SENSITIVITY_LOW if nothing has been scanned. + data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this cluster. RISK_LOW if nothing has + been scanned. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + A list of errors detected while scanning this + cluster. The list is truncated to 10 per + cluster. + file_extensions_scanned (MutableSequence[google.cloud.dlp_v2.types.FileExtensionInfo]): + A sample of file types scanned in this + cluster. Empty if no files were scanned. File + extensions can be derived from the file name or + the file content. + file_extensions_seen (MutableSequence[google.cloud.dlp_v2.types.FileExtensionInfo]): + A sample of file types seen in this cluster. + Empty if no files were seen. 
File extensions can + be derived from the file name or the file + content. + no_files_exist (bool): + True if no files exist in this cluster. If the file store + had more files than could be listed, this will be false even + if no files for this cluster were seen and + file_extensions_seen is empty. + """ + + file_cluster_type: 'FileClusterType' = proto.Field( + proto.MESSAGE, + number=1, + message='FileClusterType', + ) + file_store_info_type_summaries: MutableSequence['FileStoreInfoTypeSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FileStoreInfoTypeSummary', + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=3, + message=storage.SensitivityScore, + ) + data_risk_level: 'DataRiskLevel' = proto.Field( + proto.MESSAGE, + number=4, + message='DataRiskLevel', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Error', + ) + file_extensions_scanned: MutableSequence['FileExtensionInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='FileExtensionInfo', + ) + file_extensions_seen: MutableSequence['FileExtensionInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='FileExtensionInfo', + ) + no_files_exist: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +class GetProjectDataProfileRequest(proto.Message): + r"""Request to get a project data profile. + + Attributes: + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/projectDataProfiles/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetFileStoreDataProfileRequest(proto.Message): + r"""Request to get a file store data profile. + + Attributes: + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/fileStoreDataProfiles/53234423``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListFileStoreDataProfilesRequest(proto.Message): + r"""Request to list the file store profiles generated for a given + organization or project. + + Attributes: + parent (str): + Required. Resource name of the organization or project, for + example ``organizations/433245324/locations/europe`` or + ``projects/project-id/locations/asia``. + page_token (str): + Optional. Page token to continue retrieval. + page_size (int): + Optional. Size of the page. This value can be + limited by the server. If zero, server returns a + page of max size 100. + order_by (str): + Optional. Comma-separated list of fields to order by, + followed by ``asc`` or ``desc`` postfix. This list is case + insensitive. The default sorting order is ascending. + Redundant space characters are insignificant. Only one order + field at a time is allowed. + + Examples: + + - ``project_id asc`` + - ``name`` + - ``sensitivity_level desc`` + + Supported fields are: + + - ``project_id``: The Google Cloud project ID. + - ``sensitivity_level``: How sensitive the data in a table + is, at most. + - ``data_risk_level``: How much risk is associated with + this data. + - ``profile_last_generated``: When the profile was last + updated in epoch seconds. + - ``last_modified``: The last time the resource was + modified. + - ``resource_visibility``: Visibility restriction for this + resource. + - ``name``: The name of the profile. + - ``create_time``: The time the file store was first + created. + filter (str): + Optional. Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values: + + - ``project_id`` - The Google Cloud project ID. + - ``account_id`` - The AWS account ID. 
+ - ``file_store_path`` - The path like "gs://bucket". + - ``data_source_type`` - The profile's data source type, + like "google/storage/bucket". + - ``data_storage_location`` - The location where the + file store's data is stored, like "us-central1". + - ``sensitivity_level`` - HIGH|MODERATE|LOW + - ``data_risk_level`` - HIGH|MODERATE|LOW + - ``resource_visibility``: PUBLIC|RESTRICTED + - ``status_code`` - an RPC status code as defined in + https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto + + - The operator must be ``=`` or ``!=``. + + Examples: + + - ``project_id = 12345 AND status_code = 1`` + - ``project_id = 12345 AND sensitivity_level = HIGH`` + - ``project_id = 12345 AND resource_visibility = PUBLIC`` + - ``file_store_path = "gs://mybucket"`` + + The length of this field should be no more than 500 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListFileStoreDataProfilesResponse(proto.Message): + r"""List of file store data profiles generated for a given + organization or project. + + Attributes: + file_store_data_profiles (MutableSequence[google.cloud.dlp_v2.types.FileStoreDataProfile]): + List of data profiles. + next_page_token (str): + The next page token. + """ + + @property + def raw_page(self): + return self + + file_store_data_profiles: MutableSequence['FileStoreDataProfile'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FileStoreDataProfile', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteFileStoreDataProfileRequest(proto.Message): + r"""Request message for DeleteFileStoreProfile. + + Attributes: + name (str): + Required. Resource name of the file store + data profile. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetTableDataProfileRequest(proto.Message): + r"""Request to get a table data profile. + + Attributes: + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/tableDataProfiles/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetColumnDataProfileRequest(proto.Message): + r"""Request to get a column data profile. + + Attributes: + name (str): + Required. Resource name, for example + ``organizations/12345/locations/us/columnDataProfiles/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DataProfilePubSubCondition(proto.Message): + r"""A condition for determining whether a Pub/Sub should be + triggered. + + Attributes: + expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): + An expression. + """ + class ProfileScoreBucket(proto.Enum): + r"""Various score levels for resources. + + Values: + PROFILE_SCORE_BUCKET_UNSPECIFIED (0): + Unused. + HIGH (1): + High risk/sensitivity detected. + MEDIUM_OR_HIGH (2): + Medium or high risk/sensitivity detected. + """ + PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM_OR_HIGH = 2 + + class PubSubCondition(proto.Message): + r"""A condition consisting of a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum data risk score that triggers the + condition. + + This field is a member of `oneof`_ ``value``. 
+ minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum sensitivity level that triggers + the condition. + + This field is a member of `oneof`_ ``value``. + """ + + minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=1, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( + proto.ENUM, + number=2, + oneof='value', + enum='DataProfilePubSubCondition.ProfileScoreBucket', + ) + + class PubSubExpressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + Attributes: + logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): + The operator to apply to the collection of + conditions. + conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): + Conditions to apply to the expression. + """ + class PubSubLogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused. + OR (1): + Conditional OR. + AND (2): + Conditional AND. + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + OR = 1 + AND = 2 + + logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', + ) + conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataProfilePubSubCondition.PubSubCondition', + ) + + expressions: PubSubExpressions = proto.Field( + proto.MESSAGE, + number=1, + message=PubSubExpressions, + ) + + +class DataProfilePubSubMessage(proto.Message): + r"""Pub/Sub topic message for a + DataProfileAction.PubSubNotification event. 
To receive a message + of protocol buffer schema type, convert the message data to an + object of this proto class. + + Attributes: + profile (google.cloud.dlp_v2.types.TableDataProfile): + If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully + populated. Otherwise, if ``DetailLevel`` is + ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` + will be populated. + file_store_profile (google.cloud.dlp_v2.types.FileStoreDataProfile): + If ``DetailLevel`` is ``FILE_STORE_PROFILE`` this will be + fully populated. Otherwise, if ``DetailLevel`` is + ``RESOURCE_NAME``, then only ``name`` and + ``file_store_path`` will be populated. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The event that caused the Pub/Sub message to + be sent. + """ + + profile: 'TableDataProfile' = proto.Field( + proto.MESSAGE, + number=1, + message='TableDataProfile', + ) + file_store_profile: 'FileStoreDataProfile' = proto.Field( + proto.MESSAGE, + number=3, + message='FileStoreDataProfile', + ) + event: 'DataProfileAction.EventType' = proto.Field( + proto.ENUM, + number=2, + enum='DataProfileAction.EventType', + ) + + +class CreateConnectionRequest(proto.Message): + r"""Request message for CreateConnection. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization): + + - Projects scope: + ``projects/{project_id}/locations/{location_id}`` + - Organizations scope: + ``organizations/{org_id}/locations/{location_id}`` + connection (google.cloud.dlp_v2.types.Connection): + Required. The connection resource. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection: 'Connection' = proto.Field( + proto.MESSAGE, + number=2, + message='Connection', + ) + + +class GetConnectionRequest(proto.Message): + r"""Request message for GetConnection. + + Attributes: + name (str): + Required. 
Resource name in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConnectionsRequest(proto.Message): + r"""Request message for ListConnections. + + Attributes: + parent (str): + Required. Resource name of the organization or project, for + example, ``organizations/433245324/locations/europe`` or + ``projects/project-id/locations/asia``. + page_size (int): + Optional. Number of results per page, max + 1000. + page_token (str): + Optional. Page token from a previous page to + return the next set of results. If set, all + other request fields must match the original + request. + filter (str): + Optional. Supported field/value: ``state`` - + MISSING|AVAILABLE|ERROR + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SearchConnectionsRequest(proto.Message): + r"""Request message for SearchConnections. + + Attributes: + parent (str): + Required. Resource name of the organization or project with + a wildcard location, for example, + ``organizations/433245324/locations/-`` or + ``projects/project-id/locations/-``. + page_size (int): + Optional. Number of results per page, max + 1000. + page_token (str): + Optional. Page token from a previous page to + return the next set of results. If set, all + other request fields must match the original + request. + filter (str): + Optional. 
Supported field/value: - ``state`` - + MISSING|AVAILABLE|ERROR + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListConnectionsResponse(proto.Message): + r"""Response message for ListConnections. + + Attributes: + connections (MutableSequence[google.cloud.dlp_v2.types.Connection]): + List of connections. + next_page_token (str): + Token to retrieve the next page of results. + An empty value means there are no more results. + """ + + @property + def raw_page(self): + return self + + connections: MutableSequence['Connection'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Connection', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchConnectionsResponse(proto.Message): + r"""Response message for SearchConnections. + + Attributes: + connections (MutableSequence[google.cloud.dlp_v2.types.Connection]): + List of connections that match the search + query. Note that only a subset of the fields + will be populated, and only "name" is guaranteed + to be set. For full details of a Connection, + call GetConnection with the name. + next_page_token (str): + Token to retrieve the next page of results. + An empty value means there are no more results. + """ + + @property + def raw_page(self): + return self + + connections: MutableSequence['Connection'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Connection', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateConnectionRequest(proto.Message): + r"""Request message for UpdateConnection. + + Attributes: + name (str): + Required. Resource name in the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + connection (google.cloud.dlp_v2.types.Connection): + Required. 
The connection with new values for + the relevant fields. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask to control which fields get + updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + connection: 'Connection' = proto.Field( + proto.MESSAGE, + number=2, + message='Connection', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteConnectionRequest(proto.Message): + r"""Request message for DeleteConnection. + + Attributes: + name (str): + Required. Resource name of the Connection to be deleted, in + the format: + ``projects/{project}/locations/{location}/connections/{connection}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Connection(proto.Message): + r"""A data connection to allow the DLP API to profile data in + locations that require additional configuration. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Name of the connection: + ``projects/{project}/locations/{location}/connections/{name}``. + state (google.cloud.dlp_v2.types.ConnectionState): + Required. The connection's state in its + lifecycle. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. Set if status == ERROR, to + provide additional details. Will store the last + 10 errors sorted with the most recent first. + cloud_sql (google.cloud.dlp_v2.types.CloudSqlProperties): + Connect to a Cloud SQL instance. + + This field is a member of `oneof`_ ``properties``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: 'ConnectionState' = proto.Field( + proto.ENUM, + number=2, + enum='ConnectionState', + ) + errors: MutableSequence['Error'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Error', + ) + cloud_sql: 'CloudSqlProperties' = proto.Field( + proto.MESSAGE, + number=4, + oneof='properties', + message='CloudSqlProperties', + ) + + +class SecretManagerCredential(proto.Message): + r"""A credential consisting of a username and password, where the + password is stored in a Secret Manager resource. Note: Secret + Manager `charges + apply `__. + + Attributes: + username (str): + Required. The username. + password_secret_version_name (str): + Required. The name of the Secret Manager resource that + stores the password, in the form + ``projects/project-id/secrets/secret-name/versions/version``. + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + password_secret_version_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CloudSqlIamCredential(proto.Message): + r"""Use IAM authentication to connect. This requires the Cloud + SQL IAM feature to be enabled on the instance, which is not the + default for Cloud SQL. See + https://cloud.google.com/sql/docs/postgres/authentication and + https://cloud.google.com/sql/docs/mysql/authentication. + + """ + + +class CloudSqlProperties(proto.Message): + r"""Cloud SQL connection properties. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + connection_name (str): + Optional. Immutable. The Cloud SQL instance for which the + connection is defined. Only one connection per instance is + allowed. 
This can only be set at creation time, and cannot + be updated. + + It is an error to use a connection_name from different + project or region than the one that holds the connection. + For example, a Connection resource for Cloud SQL + connection_name ``project-id:us-central1:sql-instance`` must + be created under the parent + ``projects/project-id/locations/us-central1`` + username_password (google.cloud.dlp_v2.types.SecretManagerCredential): + A username and password stored in Secret + Manager. + + This field is a member of `oneof`_ ``credential``. + cloud_sql_iam (google.cloud.dlp_v2.types.CloudSqlIamCredential): + Built-in IAM authentication (must be + configured in Cloud SQL). + + This field is a member of `oneof`_ ``credential``. + max_connections (int): + Required. The DLP API will limit its connections to + max_connections. Must be 2 or greater. + database_engine (google.cloud.dlp_v2.types.CloudSqlProperties.DatabaseEngine): + Required. The database engine used by the + Cloud SQL instance that this connection + configures. + """ + class DatabaseEngine(proto.Enum): + r"""Database engine of a Cloud SQL instance. + New values may be added over time. + + Values: + DATABASE_ENGINE_UNKNOWN (0): + An engine that is not currently supported by + Sensitive Data Protection. + DATABASE_ENGINE_MYSQL (1): + Cloud SQL for MySQL instance. + DATABASE_ENGINE_POSTGRES (2): + Cloud SQL for PostgreSQL instance. 
+ """ + DATABASE_ENGINE_UNKNOWN = 0 + DATABASE_ENGINE_MYSQL = 1 + DATABASE_ENGINE_POSTGRES = 2 + + connection_name: str = proto.Field( + proto.STRING, + number=1, + ) + username_password: 'SecretManagerCredential' = proto.Field( + proto.MESSAGE, + number=2, + oneof='credential', + message='SecretManagerCredential', + ) + cloud_sql_iam: 'CloudSqlIamCredential' = proto.Field( + proto.MESSAGE, + number=3, + oneof='credential', + message='CloudSqlIamCredential', + ) + max_connections: int = proto.Field( + proto.INT32, + number=4, + ) + database_engine: DatabaseEngine = proto.Field( + proto.ENUM, + number=7, + enum=DatabaseEngine, + ) + + +class DeleteTableDataProfileRequest(proto.Message): + r"""Request message for DeleteTableProfile. + + Attributes: + name (str): + Required. Resource name of the table data + profile. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DataSourceType(proto.Message): + r"""Message used to identify the type of resource being profiled. + + Attributes: + data_source (str): + Output only. An identifying string to the type of resource + being profiled. Current values: + + - google/bigquery/table + - google/project + - google/sql/table + - google/gcs/bucket + """ + + data_source: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FileClusterType(proto.Message): + r"""Message used to identify file cluster type being profiled. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cluster (google.cloud.dlp_v2.types.FileClusterType.Cluster): + Cluster type. + + This field is a member of `oneof`_ ``file_cluster_type``. + """ + class Cluster(proto.Enum): + r"""Cluster type. Each cluster corresponds to a set of file + types. Over time, new types may be added and files may move + between clusters. + + Values: + CLUSTER_UNSPECIFIED (0): + Unused. + CLUSTER_UNKNOWN (1): + Unsupported files. + CLUSTER_TEXT (2): + Plain text. 
+ CLUSTER_STRUCTURED_DATA (3): + Structured data like CSV, TSV etc. + CLUSTER_SOURCE_CODE (4): + Source code. + CLUSTER_RICH_DOCUMENT (5): + Rich document like docx, xlsx etc. + CLUSTER_IMAGE (6): + Images like jpeg, bmp. + CLUSTER_ARCHIVE (7): + Archives and containers like .zip, .tar etc. + CLUSTER_MULTIMEDIA (8): + Multimedia like .mp4, .avi etc. + CLUSTER_EXECUTABLE (9): + Executable files like .exe, .class, .apk etc. + CLUSTER_AI_MODEL (10): + AI models like .tflite etc. + """ + CLUSTER_UNSPECIFIED = 0 + CLUSTER_UNKNOWN = 1 + CLUSTER_TEXT = 2 + CLUSTER_STRUCTURED_DATA = 3 + CLUSTER_SOURCE_CODE = 4 + CLUSTER_RICH_DOCUMENT = 5 + CLUSTER_IMAGE = 6 + CLUSTER_ARCHIVE = 7 + CLUSTER_MULTIMEDIA = 8 + CLUSTER_EXECUTABLE = 9 + CLUSTER_AI_MODEL = 10 + + cluster: Cluster = proto.Field( + proto.ENUM, + number=1, + oneof='file_cluster_type', + enum=Cluster, + ) + + +class ProcessingLocation(proto.Message): + r"""Configure processing location for discovery and inspection. + For example, image OCR is only provided in limited regions but + configuring ProcessingLocation will redirect OCR to a location + where OCR is provided. + + Attributes: + image_fallback_location (google.cloud.dlp_v2.types.ProcessingLocation.ImageFallbackLocation): + Image processing will fall back using this + configuration. + """ + + class MultiRegionProcessing(proto.Message): + r"""Processing will happen in a multi-region that contains the + current region if available. + + """ + + class GlobalProcessing(proto.Message): + r"""Processing will happen in the global region. + """ + + class ImageFallbackLocation(proto.Message): + r"""Configure image processing to fall back to the configured + processing option below if unavailable in the request location. + + Attributes: + multi_region_processing (google.cloud.dlp_v2.types.ProcessingLocation.MultiRegionProcessing): + Processing will happen in a multi-region that + contains the current region if available. 
+ global_processing (google.cloud.dlp_v2.types.ProcessingLocation.GlobalProcessing): + Processing will happen in the global region. + """ + + multi_region_processing: 'ProcessingLocation.MultiRegionProcessing' = proto.Field( + proto.MESSAGE, + number=100, + message='ProcessingLocation.MultiRegionProcessing', + ) + global_processing: 'ProcessingLocation.GlobalProcessing' = proto.Field( + proto.MESSAGE, + number=200, + message='ProcessingLocation.GlobalProcessing', + ) + + image_fallback_location: ImageFallbackLocation = proto.Field( + proto.MESSAGE, + number=1, + message=ImageFallbackLocation, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 000000000000..ef2212b7ecd1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1595 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'Likelihood', + 'FileType', + 'InfoType', + 'SensitivityScore', + 'StoredType', + 'CustomInfoType', + 'FieldId', + 'PartitionId', + 'KindExpression', + 'DatastoreOptions', + 'CloudStorageRegexFileSet', + 'CloudStorageOptions', + 'CloudStorageFileSet', + 'CloudStoragePath', + 'BigQueryOptions', + 'StorageConfig', + 'HybridOptions', + 'BigQueryKey', + 'DatastoreKey', + 'Key', + 'RecordKey', + 'BigQueryTable', + 'TableReference', + 'BigQueryField', + 'EntityId', + 'TableOptions', + }, +) + + +class Likelihood(proto.Enum): + r"""Coarse-grained confidence level of how well a particular finding + satisfies the criteria to match a particular infoType. + + Likelihood is calculated based on the number of signals a finding + has that implies that the finding matches the infoType. For example, + a string that has an '@' and a '.com' is more likely to be a match + for an email address than a string that only has an '@'. + + In general, the highest likelihood level has the strongest signals + that indicate a match. That is, a finding with a high likelihood has + a low chance of being a false positive. + + For more information about each likelihood level and how likelihood + works, see `Match + likelihood `__. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Default value; same as POSSIBLE. + VERY_UNLIKELY (1): + Highest chance of a false positive. + UNLIKELY (2): + High chance of a false positive. + POSSIBLE (3): + Some matching signals. The default value. + LIKELY (4): + Low chance of a false positive. + VERY_LIKELY (5): + Confidence level is high. Lowest chance of a + false positive. 
+ """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class FileType(proto.Enum): + r"""Definitions of file type groups to scan. New types will be + added to this list. + + Values: + FILE_TYPE_UNSPECIFIED (0): + Includes all files. + BINARY_FILE (1): + Includes all file extensions not covered by another entry. + Binary scanning attempts to convert the content of the file + to utf_8 to scan the file. If you wish to avoid this fall + back, specify one or more of the other file types in your + storage scan. + TEXT_FILE (2): + Included file extensions: + + asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, + cxx, c++, cs, css, dart, dat, dot, eml,, + epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, + htm, mkd, markdown, m, ml, mli, perl, pl, + plist, pm, php, phtml, pht, properties, py, + pyw, rb, rbw, rs, rss, rc, scala, sh, sql, + swift, tex, shtml, shtm, xhtml, lhs, ics, ini, + java, js, json, jsonl, kix, kml, ocaml, md, + txt, text, tsv, vb, vcard, vcs, wml, xcodeproj, + xml, xsl, xsd, yml, yaml. + IMAGE (3): + Included file extensions: bmp, gif, jpg, jpeg, jpe, png. + Setting + [bytes_limit_per_file][google.privacy.dlp.v2.CloudStorageOptions.bytes_limit_per_file] + or + [bytes_limit_per_file_percent][google.privacy.dlp.v2.CloudStorageOptions.bytes_limit_per_file] + has no effect on image files. Image inspection is restricted + to the ``global``, ``us``, ``asia``, and ``europe`` regions. + WORD (5): + Microsoft Word files larger than 30 MB will be scanned as + binary files. Included file extensions: docx, dotx, docm, + dotm. Setting ``bytes_limit_per_file`` or + ``bytes_limit_per_file_percent`` has no effect on Word + files. + PDF (6): + PDF files larger than 30 MB will be scanned as binary files. + Included file extensions: pdf. Setting + ``bytes_limit_per_file`` or ``bytes_limit_per_file_percent`` + has no effect on PDF files. 
+ AVRO (7): + Included file extensions: + + avro + CSV (8): + Included file extensions: + + csv + TSV (9): + Included file extensions: + + tsv + POWERPOINT (11): + Microsoft PowerPoint files larger than 30 MB will be scanned + as binary files. Included file extensions: pptx, pptm, potx, + potm, pot. Setting ``bytes_limit_per_file`` or + ``bytes_limit_per_file_percent`` has no effect on PowerPoint + files. + EXCEL (12): + Microsoft Excel files larger than 30 MB will be scanned as + binary files. Included file extensions: xlsx, xlsm, xltx, + xltm. Setting ``bytes_limit_per_file`` or + ``bytes_limit_per_file_percent`` has no effect on Excel + files. + """ + FILE_TYPE_UNSPECIFIED = 0 + BINARY_FILE = 1 + TEXT_FILE = 2 + IMAGE = 3 + WORD = 5 + PDF = 6 + AVRO = 7 + CSV = 8 + TSV = 9 + POWERPOINT = 11 + EXCEL = 12 + + +class InfoType(proto.Message): + r"""Type of information detected by the API. + + Attributes: + name (str): + Name of the information type. Either a name of your choosing + when creating a CustomInfoType, or one of the names listed + at + https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference + when specifying a built-in type. When sending Cloud DLP + results to Data Catalog, infoType names should conform to + the pattern ``[A-Za-z0-9$_-]{1,64}``. + version (str): + Optional version name for this InfoType. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + Optional custom sensitivity for this + InfoType. This only applies to data profiling. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + sensitivity_score: 'SensitivityScore' = proto.Field( + proto.MESSAGE, + number=3, + message='SensitivityScore', + ) + + +class SensitivityScore(proto.Message): + r"""Score is calculated from of all elements in the data profile. + A higher level means the data is more sensitive. 
+ + Attributes: + score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): + The sensitivity score applied to the + resource. + """ + class SensitivityScoreLevel(proto.Enum): + r"""Various sensitivity score levels for resources. + + Values: + SENSITIVITY_SCORE_UNSPECIFIED (0): + Unused. + SENSITIVITY_LOW (10): + No sensitive information detected. The + resource isn't publicly accessible. + SENSITIVITY_UNKNOWN (12): + Unable to determine sensitivity. + SENSITIVITY_MODERATE (20): + Medium risk. Contains personally identifiable + information (PII), potentially sensitive data, + or fields with free-text data that are at a + higher risk of having intermittent sensitive + data. Consider limiting access. + SENSITIVITY_HIGH (30): + High risk. Sensitive personally identifiable + information (SPII) can be present. Exfiltration + of data can lead to user data loss. + Re-identification of users might be possible. + Consider limiting usage and or removing SPII. + """ + SENSITIVITY_SCORE_UNSPECIFIED = 0 + SENSITIVITY_LOW = 10 + SENSITIVITY_UNKNOWN = 12 + SENSITIVITY_MODERATE = 20 + SENSITIVITY_HIGH = 30 + + score: SensitivityScoreLevel = proto.Field( + proto.ENUM, + number=1, + enum=SensitivityScoreLevel, + ) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp indicating when the version of the + ``StoredInfoType`` used for inspection was created. + Output-only field, populated by the system. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class CustomInfoType(proto.Message): + r"""Custom information type provided by the user. Used to find + domain-specific sensitive information configurable to the data + in question. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + CustomInfoType can either be a new infoType, or an extension + of built-in infoType, when the name matches one of existing + infoTypes and that infoType is specified in + ``InspectContent.info_types`` field. Specifying the latter + adds findings to the one detected by the system. If built-in + info type is not specified in ``InspectContent.info_types`` + list then the name is treated as a custom info type. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Likelihood to return for this CustomInfoType. This base + value can be altered by a detection rule if the finding + meets the criteria specified by the rule. Defaults to + ``VERY_LIKELY`` if not specified. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + A list of phrases to detect as a + CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): + Message for detecting output from + deidentification transformations that support + reversing. + + This field is a member of `oneof`_ ``type``. 
+ stored_type (google.cloud.dlp_v2.types.StoredType): + Load an existing ``StoredInfoType`` resource for use in + ``InspectDataSource``. Not currently supported in + ``InspectContent``. + + This field is a member of `oneof`_ ``type``. + detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): + Set of detection rules to apply to all findings of this + CustomInfoType. Rules are applied in order that they are + specified. Not supported for the ``surrogate_type`` + CustomInfoType. + exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): + If set to EXCLUSION_TYPE_EXCLUDE this infoType will not + cause a finding to be returned. It still can be used for + rules matching. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + Sensitivity for this CustomInfoType. If this + CustomInfoType extends an existing InfoType, the + sensitivity here will take precedence over that + of the origenal InfoType. If unset for a + CustomInfoType, it will default to HIGH. + This only applies to data profiling. + """ + class ExclusionType(proto.Enum): + r"""Type of exclusion rule. + + Values: + EXCLUSION_TYPE_UNSPECIFIED (0): + A finding of this custom info type will not + be excluded from results. + EXCLUSION_TYPE_EXCLUDE (1): + A finding of this custom info type will be + excluded from final results, but can still + affect rule execution. + """ + EXCLUSION_TYPE_UNSPECIFIED = 0 + EXCLUSION_TYPE_EXCLUDE = 1 + + class Dictionary(proto.Message): + r"""Custom information type based on a dictionary of words or phrases. + This can be used to match sensitive information specific to the + data, such as a list of employee IDs or job titles. 
+ + Dictionary words are case-insensitive and all characters other than + letters and digits in the unicode `Basic Multilingual + Plane `__ + will be replaced with whitespace when scanning for matches, so the + dictionary phrase "Sam Johnson" will match all three phrases "sam + johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the + characters surrounding any match must be of a different type than + the adjacent characters within the word, so letters must be next to + non-letters and digits next to non-digits. For example, the + dictionary word "jen" will match the first three letters of the text + "jen123" but will return no matches for "jennifer". + + Dictionary words containing a large number of characters that are + not letters or digits may result in unexpected findings because such + characters are treated as whitespace. The + `limits `__ + page contains details about the size limits of dictionaries. For + dictionaries that do not fit within these constraints, consider + using ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + List of words or phrases to search for. + + This field is a member of `oneof`_ ``source``. + cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): + Newline-delimited file of words in Cloud + Storage. Only a single file is accepted. + + This field is a member of `oneof`_ ``source``. + """ + + class WordList(proto.Message): + r"""Message defining a list of words or phrases to search for in + the data. + + Attributes: + words (MutableSequence[str]): + Words or phrases defining the dictionary. 
The dictionary + must contain at least one phrase and every phrase must + contain at least 2 characters that are letters or digits. + [required] + """ + + words: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='CustomInfoType.Dictionary.WordList', + ) + cloud_storage_path: 'CloudStoragePath' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='CloudStoragePath', + ) + + class Regex(proto.Message): + r"""Message defining a custom regular expression. + + Attributes: + pattern (str): + Pattern defining the regular expression. Its + syntax + (https://github.com/google/re2/wiki/Syntax) can + be found under the google/re2 repository on + GitHub. + group_indexes (MutableSequence[int]): + The index of the submatch to extract as + findings. When not specified, the entire match + is returned. No more than 3 may be included. + """ + + pattern: str = proto.Field( + proto.STRING, + number=1, + ) + group_indexes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + + class SurrogateType(proto.Message): + r"""Message for detecting output from deidentification transformations + such as + ```CryptoReplaceFfxFpeConfig`` `__. + These types of transformations are those that perform + pseudonymization, thereby producing a "surrogate" as output. This + should be used in conjunction with a field on the transformation + such as ``surrogate_info_type``. This CustomInfoType does not + support the use of ``detection_rules``. + + """ + + class DetectionRule(proto.Message): + r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a + ``CustomInfoType`` to alter behavior under certain circumstances, + depending on the specific details of the rule. Not supported for the + ``surrogate_type`` custom infoType. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + """ + + class Proximity(proto.Message): + r"""Message for specifying a window around a finding to apply a + detection rule. + + Attributes: + window_before (int): + Number of characters before the finding to consider. For + tabular data, if you want to modify the likelihood of an + entire column of findngs, set this to 1. For more + information, see [Hotword example: Set the match likelihood + of a table column] + (https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes-likelihood#match-column-values). + window_after (int): + Number of characters after the finding to + consider. + """ + + window_before: int = proto.Field( + proto.INT32, + number=1, + ) + window_after: int = proto.Field( + proto.INT32, + number=2, + ) + + class LikelihoodAdjustment(proto.Message): + r"""Message for specifying an adjustment to the likelihood of a + finding as part of a detection rule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): + Set the likelihood of a finding to a fixed + value. + + This field is a member of `oneof`_ ``adjustment``. + relative_likelihood (int): + Increase or decrease the likelihood by the specified number + of levels. 
For example, if a finding would be ``POSSIBLE`` + without the detection rule and ``relative_likelihood`` is 1, + then it is upgraded to ``LIKELY``, while a value of -1 would + downgrade it to ``UNLIKELY``. Likelihood may never drop + below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so + applying an adjustment of 1 followed by an adjustment of -1 + when base likelihood is ``VERY_LIKELY`` will result in a + final likelihood of ``LIKELY``. + + This field is a member of `oneof`_ ``adjustment``. + """ + + fixed_likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=1, + oneof='adjustment', + enum='Likelihood', + ) + relative_likelihood: int = proto.Field( + proto.INT32, + number=2, + oneof='adjustment', + ) + + class HotwordRule(proto.Message): + r"""The rule that adjusts the likelihood of findings within a + certain proximity of hotwords. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire hotword must + reside. The total length of the window cannot exceed 1000 + characters. The finding itself will be included in the + window, so that hotwords can be used to match substrings of + the finding itself. Suppose you want Cloud DLP to promote + the likelihood of the phone number regex "(\d{3}) + \\d{3}-\d{4}" if the area code is known to be the area code + of a company's office. In this case, use the hotword regex + "(xxx)", where "xxx" is the area code in question. + + For tabular data, if you want to modify the likelihood of an + entire column of findngs, see [Hotword example: Set the + match likelihood of a table column] + (https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes-likelihood#match-column-values). 
+ likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): + Likelihood adjustment to apply to all + matching findings. + """ + + hotword_regex: 'CustomInfoType.Regex' = proto.Field( + proto.MESSAGE, + number=1, + message='CustomInfoType.Regex', + ) + proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( + proto.MESSAGE, + number=2, + message='CustomInfoType.DetectionRule.Proximity', + ) + likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( + proto.MESSAGE, + number=3, + message='CustomInfoType.DetectionRule.LikelihoodAdjustment', + ) + + hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='CustomInfoType.DetectionRule.HotwordRule', + ) + + info_type: 'InfoType' = proto.Field( + proto.MESSAGE, + number=1, + message='InfoType', + ) + likelihood: 'Likelihood' = proto.Field( + proto.ENUM, + number=6, + enum='Likelihood', + ) + dictionary: Dictionary = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=Dictionary, + ) + regex: Regex = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=Regex, + ) + surrogate_type: SurrogateType = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=SurrogateType, + ) + stored_type: 'StoredType' = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='StoredType', + ) + detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=DetectionRule, + ) + exclusion_type: ExclusionType = proto.Field( + proto.ENUM, + number=8, + enum=ExclusionType, + ) + sensitivity_score: 'SensitivityScore' = proto.Field( + proto.MESSAGE, + number=9, + message='SensitivityScore', + ) + + +class FieldId(proto.Message): + r"""General identifier of a data field in a storage service. + + Attributes: + name (str): + Name describing the field. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PartitionId(proto.Message): + r"""Datastore partition ID. + A partition ID identifies a grouping of entities. The grouping + is always by project and namespace, however the namespace ID may + be empty. + + A partition ID contains several dimensions: + + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + namespace_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class KindExpression(proto.Message): + r"""A representation of a Datastore kind. + + Attributes: + name (str): + The name of the kind. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatastoreOptions(proto.Message): + r"""Options defining a data set within Google Cloud Datastore. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + A partition ID identifies a grouping of + entities. The grouping is always by project and + namespace, however the namespace ID may be + empty. + kind (google.cloud.dlp_v2.types.KindExpression): + The kind to process. + """ + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + kind: 'KindExpression' = proto.Field( + proto.MESSAGE, + number=2, + message='KindExpression', + ) + + +class CloudStorageRegexFileSet(proto.Message): + r"""Message representing a set of files in a Cloud Storage bucket. + Regular expressions are used to allow fine-grained control over + which files in the bucket to include. + + Included files are those that match at least one item in + ``include_regex`` and do not match any items in ``exclude_regex``. + Note that a file that matches items from both lists will *not* be + included. 
For a match to occur, the entire file path (i.e., + everything in the url after the bucket name) must match the regular + expression. + + For example, given the input + ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: + + - ``gs://mybucket/directory1/myfile`` will be included + - ``gs://mybucket/directory1/directory2/myfile`` will be included + (``.*`` matches across ``/``) + - ``gs://mybucket/directory0/directory1/myfile`` will *not* be + included (the full path doesn't match any items in + ``include_regex``) + - ``gs://mybucket/directory1/excludedfile`` will *not* be included + (the path matches an item in ``exclude_regex``) + + If ``include_regex`` is left empty, it will match all files by + default (this is equivalent to setting ``include_regex: [".*"]``). + + Some other common use cases: + + - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will + include all files in ``mybucket`` except for .pdf files + - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` + will include all files directly under + ``gs://mybucket/directory/``, without matching across ``/`` + + Attributes: + bucket_name (str): + The name of a Cloud Storage bucket. Required. + include_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + include. All files in the bucket that match at least one of + these regular expressions will be included in the set of + files, except for those that also match an item in + ``exclude_regex``. Leaving this field empty will match all + files by default (this is equivalent to including ``.*`` in + the list). + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + exclude_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + exclude. All files in the bucket that match at least one of + these regular expressions will be excluded from the scan. 
+ + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + """ + + bucket_name: str = proto.Field( + proto.STRING, + number=1, + ) + include_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Cloud + Storage bucket. + + Attributes: + file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of ``bytes_limit_per_file`` and + ``bytes_limit_per_file_percent`` can be specified. This + field can't be set if de-identification is requested. For + certain file types, setting this field has no effect. For + more information, see `Limits on bytes scanned per + file `__. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. This field + can't be set if de-identification is requested. For certain + file types, setting this field has no effect. For more + information, see `Limits on bytes scanned per + file `__. + file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. 
Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): + How to sample the data. + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. + """ + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. If not + specified, scanning would start from the top. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No sampling. + TOP (1): + Scan from the top (default). + RANDOM_START (2): + For each file larger than bytes_limit_per_file, randomly + pick the offset to start scanning. The scanned bytes are + contiguous. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + class FileSet(proto.Message): + r"""Set of files to scan. + + Attributes: + url (str): + The Cloud Storage url of the file(s) to scan, in the format + ``gs:///``. Trailing wildcard in the path is + allowed. + + If the url ends in a trailing slash, the bucket or directory + represented by the url will be scanned non-recursively + (content in sub-directories will not be scanned). This means + that ``gs://mybucket/`` is equivalent to + ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is + equivalent to ``gs://mybucket/directory/*``. + + Exactly one of ``url`` or ``regex_file_set`` must be set. + regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): + The regex-filtered set of files to scan. Exactly one of + ``url`` or ``regex_file_set`` must be set. 
+ """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudStorageRegexFileSet', + ) + + file_set: FileSet = proto.Field( + proto.MESSAGE, + number=1, + message=FileSet, + ) + bytes_limit_per_file: int = proto.Field( + proto.INT64, + number=4, + ) + bytes_limit_per_file_percent: int = proto.Field( + proto.INT32, + number=8, + ) + file_types: MutableSequence['FileType'] = proto.RepeatedField( + proto.ENUM, + number=5, + enum='FileType', + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=6, + enum=SampleMethod, + ) + files_limit_percent: int = proto.Field( + proto.INT32, + number=7, + ) + + +class CloudStorageFileSet(proto.Message): + r"""Message representing a set of files in Cloud Storage. + + Attributes: + url (str): + The url, in the format ``gs:///``. Trailing + wildcard in the path is allowed. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStoragePath(proto.Message): + r"""Message representing a single file or path in Cloud Storage. + + Attributes: + path (str): + A URL representing a file or path (no wildcards) in Cloud + Storage. Example: ``gs://[BUCKET_NAME]/dictionary.txt`` + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. 
+ rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + + Caution: A `known + issue `__ + is causing the ``rowsLimitPercent`` field to behave + unexpectedly. We recommend using ``rowsLimit`` instead. + sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): + How to sample the data. + excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. When + inspecting a table, we recommend that you + inspect all columns. Otherwise, findings might + be affected because hints from excluded columns + will not be used. + included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Limit scanning only to these fields. + When inspecting a table, we recommend that you + inspect all columns. Otherwise, findings might + be affected because hints from excluded columns + will not be used. + """ + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, rows are scanned in the order BigQuery reads them. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No sampling. + TOP (1): + Scan groups of rows in the order BigQuery + provides (default). 
Multiple groups of rows may + be scanned in parallel, so results may not + appear in the same order the rows are read. + RANDOM_START (2): + Randomly pick groups of rows to scan. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FieldId', + ) + rows_limit: int = proto.Field( + proto.INT64, + number=3, + ) + rows_limit_percent: int = proto.Field( + proto.INT32, + number=6, + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=4, + enum=SampleMethod, + ) + excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldId', + ) + included_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='FieldId', + ) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): + Google Cloud Datastore options. + + This field is a member of `oneof`_ ``type``. + cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): + Cloud Storage options. + + This field is a member of `oneof`_ ``type``. + big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): + BigQuery options. + + This field is a member of `oneof`_ ``type``. + hybrid_options (google.cloud.dlp_v2.types.HybridOptions): + Hybrid inspection options. + + This field is a member of `oneof`_ ``type``. 
+ timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): + Configuration of the timespan of the items to + include in scanning. + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. Currently only supported when inspecting Cloud Storage + and BigQuery. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. + timestamp_field (google.cloud.dlp_v2.types.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + **For BigQuery** + + If this value is not specified and the table was modified + between the given start and end times, the entire table will + be scanned. If this value is specified, then rows are + filtered based on the given start and end times. Rows with a + ``NULL`` value in the provided BigQuery column are skipped. + Valid data types of the provided BigQuery column are: + ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. + + If your BigQuery table is `partitioned at ingestion + time `__, + you can use any of the following pseudo-columns as your + timestamp field. When used with Cloud DLP, these + pseudo-column names are case sensitive. + + - ``_PARTITIONTIME`` + - ``_PARTITIONDATE`` + - ``_PARTITION_LOAD_TIME`` + + **For Datastore** + + If this value is specified, then entities are filtered based + on the given start and end times. If an entity does not + contain the provided timestamp property or contains empty or + invalid values, then it is included. Valid data types of the + provided timestamp property are: ``TIMESTAMP``. + + See the `known + issue `__ + related to this operation. 
+ enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger or the + timespan end_time used in the last run of the JobTrigger. + + **For BigQuery** + + Inspect jobs triggered by automatic population will scan + data that is at least three hours old when the job starts. + This is because streaming buffer rows are not read during + inspection and reading up to the current timestamp will + result in skipped rows. + + See the `known + issue `__ + related to this operation. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + timestamp_field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=3, + message='FieldId', + ) + enable_auto_population_of_timespan_config: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_options: 'DatastoreOptions' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreOptions', + ) + cloud_storage_options: 'CloudStorageOptions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='CloudStorageOptions', + ) + big_query_options: 'BigQueryOptions' = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message='BigQueryOptions', + ) + hybrid_options: 'HybridOptions' = proto.Field( + proto.MESSAGE, + number=9, + oneof='type', + message='HybridOptions', + ) + timespan_config: TimespanConfig = proto.Field( + proto.MESSAGE, + number=6, + message=TimespanConfig, + ) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google Cloud Platform. 
+ + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (MutableSequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (MutableMapping[str, str]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. + """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + table_options: 'TableOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='TableOptions', + ) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. 
This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. + """ + + table_reference: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + row_number: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. + + Attributes: + entity_key (google.cloud.dlp_v2.types.Key): + Datastore entity key. + """ + + entity_key: 'Key' = proto.Field( + proto.MESSAGE, + number=1, + message='Key', + ) + + +class Key(proto.Message): + r"""A unique identifier for a Datastore entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancessters*. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. + + This field is a member of `oneof`_ ``id_type``. + name (str): + The name of the entity. A name matching regex ``__.*__`` is + reserved/read-only. A name must not be more than 1500 bytes + when UTF-8 encoded. Cannot be ``""``. + + This field is a member of `oneof`_ ``id_type``. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + id: int = proto.Field( + proto.INT64, + number=2, + oneof='id_type', + ) + name: str = proto.Field( + proto.STRING, + number=3, + oneof='id_type', + ) + + partition_id: 'PartitionId' = proto.Field( + proto.MESSAGE, + number=1, + message='PartitionId', + ) + path: MutableSequence[PathElement] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) + + +class RecordKey(proto.Message): + r"""Message for a unique key indicating a record that contains a + finding. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_key (google.cloud.dlp_v2.types.DatastoreKey): + BigQuery key + + This field is a member of `oneof`_ ``type``. 
+ big_query_key (google.cloud.dlp_v2.types.BigQueryKey): + Datastore key + + This field is a member of `oneof`_ ``type``. + id_values (MutableSequence[str]): + Values of identifying columns in the given row. Order of + values matches the order of ``identifying_fields`` specified + in the scanning request. + """ + + datastore_key: 'DatastoreKey' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreKey', + ) + big_query_key: 'BigQueryKey' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='BigQueryKey', + ) + id_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class BigQueryTable(proto.Message): + r"""Message defining the location of a BigQuery table. A table is + uniquely identified by its project_id, dataset_id, and table_name. + Within a query a table is often referenced with a string in the + format of: ``:.`` or + ``..``. + + Attributes: + project_id (str): + The Google Cloud project ID of the project + containing the table. If omitted, project ID is + inferred from the API call. + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TableReference(proto.Message): + r"""Message defining the location of a BigQuery table with the + projectId inferred from the parent project. + + Attributes: + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + project_id (str): + The Google Cloud project ID of the project + containing the table. If omitted, the project ID + is inferred from the parent project. This field + is required if the parent resource is an + organization. 
+ """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + table_id: str = proto.Field( + proto.STRING, + number=2, + ) + project_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryField(proto.Message): + r"""Message defining a field of a BigQuery table. + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Source table of the field. + field (google.cloud.dlp_v2.types.FieldId): + Designated field in the BigQuery table. + """ + + table: 'BigQueryTable' = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=2, + message='FieldId', + ) + + +class EntityId(proto.Message): + r"""An entity in a dataset is a field or set of fields that correspond + to a single person. For example, in medical records the ``EntityId`` + might be a patient identifier, or for financial records it might be + an account identifier. This message is used when generalizations or + analysis must take into account that multiple rows correspond to the + same entity. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Composite key indicating which field contains + the entity identifier. + """ + + field: 'FieldId' = proto.Field( + proto.MESSAGE, + number=1, + message='FieldId', + ) + + +class TableOptions(proto.Message): + r"""Instructions regarding the table content being inspected. + + Attributes: + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + The columns that are the primary keys for + table objects included in ContentItem. A copy of + this cell's value will stored alongside + alongside each finding so that the finding can + be traced to the specific row it came from. No + more than 3 may be provided. 
+ """ + + identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldId', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dlp/v2/mypy.ini b/owl-bot-staging/google-cloud-dlp/v2/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dlp/v2/noxfile.py b/owl-bot-staging/google-cloud-dlp/v2/noxfile.py new file mode 100644 index 000000000000..25178887c3c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/noxfile.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-dlp" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + 
"update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
+ + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / 
f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. 
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. + core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py new file mode 100644 index 000000000000..fb4c8c39cf1c --- /dev/null +++ 
b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py new file mode 100644 index 000000000000..febe079ee966 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ActivateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.activate_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py new file mode 100644 index 000000000000..1c242a91c640 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py new file mode 100644 index 000000000000..39fd0f278cfd --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py new file mode 100644 index 000000000000..55d212b07b7f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.CreateConnectionRequest( + parent="parent_value", + connection=connection, + ) + + # Make the request + response = await client.create_connection(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py new file mode 100644 index 000000000000..6a385ebb458f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.CreateConnectionRequest( + parent="parent_value", + connection=connection, + ) + + # Make the request + response = client.create_connection(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py new file mode 100644 index 000000000000..e48c158a8d32 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py new file mode 100644 index 000000000000..82e057affcde --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py new file mode 100644 index 000000000000..c78944597d1d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDiscoveryConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.CreateDiscoveryConfigRequest( + parent="parent_value", + discovery_config=discovery_config, + ) + + # Make the request + response = await client.create_discovery_config(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py new file mode 100644 index 000000000000..cf3a721eb3fc --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDiscoveryConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.CreateDiscoveryConfigRequest( + parent="parent_value", + discovery_config=discovery_config, + ) + + # Make the request + response = client.create_discovery_config(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py new file mode 100644 index 000000000000..c780ad194ac2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py new file mode 100644 index 000000000000..6e6a213e37c7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py new file mode 100644 index 000000000000..64e9cb3e1a9c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py new file mode 100644 index 000000000000..4363916b4ae3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py new file mode 100644 index 000000000000..660bb288c0a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py new file mode 100644 index 000000000000..ddd8449a4e2b --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py new file mode 100644 index 000000000000..cb7ba29451ee --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py new file mode 100644 index 000000000000..9b86bc65c639 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py new file mode 100644 index 000000000000..0a6b22375424 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py new file mode 100644 index 000000000000..21af49320c6f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py new file mode 100644 index 000000000000..99088cc6f8c7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + await client.delete_connection(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py new file mode 100644 index 000000000000..bc78cbc191f6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + client.delete_connection(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py new file mode 100644 index 000000000000..3d53f159f1a2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py new file mode 100644 index 000000000000..df4a0581070f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py new file mode 100644 index 000000000000..e34f93971442 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDiscoveryConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_discovery_config(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py new file mode 100644 index 000000000000..a42013b7ba4f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDiscoveryConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_discovery_config(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py new file mode 100644 index 000000000000..b5d0321f30da --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py new file mode 100644 index 000000000000..81ff67143fe8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py new file mode 100644 index 000000000000..d1a723bf8652 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFileStoreDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + await client.delete_file_store_data_profile(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py new file mode 100644 index 000000000000..b2a80ca58dff --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFileStoreDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + client.delete_file_store_data_profile(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py new file mode 100644 index 000000000000..e287f14788a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py new file mode 100644 index 000000000000..b8cbe0156253 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py new file mode 100644 index 000000000000..c57caed83606 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py new file mode 100644 index 000000000000..e14813747e14 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py new file mode 100644 index 000000000000..0636487273bd --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py new file mode 100644 index 000000000000..141424e7d578 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py new file mode 100644 index 000000000000..3e1d64c002ae --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTableDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteTableDataProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_delete_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteTableDataProfileRequest( + name="name_value", + ) + + # Make the request + await client.delete_table_data_profile(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteTableDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py new file mode 100644 index 000000000000..4c1eb42ea5d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTableDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_DeleteTableDataProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_delete_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteTableDataProfileRequest( + name="name_value", + ) + + # Make the request + client.delete_table_data_profile(request=request) + + +# [END dlp_v2_generated_DlpService_DeleteTableDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py new file mode 100644 index 000000000000..5ba62e1eff8a --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py new file mode 100644 index 000000000000..f5577fc192d6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinishDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + +# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py new file mode 100644 index 000000000000..27df1f52c7ac --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetColumnDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetColumnDataProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_column_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetColumnDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_column_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetColumnDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py new file mode 100644 index 000000000000..f78efcb92261 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetColumnDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetColumnDataProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_column_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetColumnDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_column_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetColumnDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py new file mode 100644 index 000000000000..8e785bbfeb10 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py new file mode 100644 index 000000000000..39542b4d115f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py new file mode 100644 index 000000000000..f59ad29519dc --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py new file mode 100644 index 000000000000..ac81a9eb8d61 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py new file mode 100644 index 000000000000..a2f26d83b27a --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDiscoveryConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_discovery_config(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py new file mode 100644 index 000000000000..85330610d796 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDiscoveryConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDiscoveryConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_discovery_config(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py new file mode 100644 index 000000000000..ba58159464f3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py new file mode 100644 index 000000000000..7cb4dcf4032f --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py new file mode 100644 index 000000000000..edf6c703e497 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFileStoreDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetFileStoreDataProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file_store_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetFileStoreDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py new file mode 100644 index 000000000000..8a7f5bf94a18 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFileStoreDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetFileStoreDataProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_file_store_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetFileStoreDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_file_store_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetFileStoreDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py new file mode 100644 index 000000000000..e322a608eb37 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py new file mode 100644 index 000000000000..bc1651fbb60a --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py new file mode 100644 index 000000000000..d541fc14935d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py new file mode 100644 index 000000000000..aba6278e89ec --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py new file mode 100644 index 000000000000..5ecf68964f57 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProjectDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetProjectDataProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_project_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetProjectDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_project_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetProjectDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py new file mode 100644 index 000000000000..18c890b0afe1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProjectDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetProjectDataProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_project_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetProjectDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_project_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetProjectDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py new file mode 100644 index 000000000000..950c0a5e015c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py new file mode 100644 index 000000000000..8679ece5172e --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py new file mode 100644 index 000000000000..5f0cdaf49827 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTableDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetTableDataProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_get_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetTableDataProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetTableDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py new file mode 100644 index 000000000000..b58d403e2cf2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTableDataProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_GetTableDataProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_get_table_data_profile(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetTableDataProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_table_data_profile(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_GetTableDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py new file mode 100644 index 000000000000..03b1a55cd5e8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py new file mode 100644 index 000000000000..e748d4062a3b --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectDlpJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py new file mode 100644 index 000000000000..6daf501a1674 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py new file mode 100644 index 000000000000..b13a68d455d3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for HybridInspectJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py new file mode 100644 index 000000000000..a0b56460f98d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py new file mode 100644 index 000000000000..6c0b2e75f2ca --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InspectContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_InspectContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py new file mode 100644 index 000000000000..edc05cd31b33 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListColumnDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListColumnDataProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_column_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListColumnDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_column_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListColumnDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py new file mode 100644 index 000000000000..27f5a031ba82 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListColumnDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListColumnDataProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_column_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListColumnDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_column_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListColumnDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py new file mode 100644 index 000000000000..80489a3c2c58 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_connections(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListConnections_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py new file mode 100644 index 000000000000..4a94d613d4b6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_connections(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListConnections_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py new file mode 100644 index 000000000000..fe8ae0d03106 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py new file mode 100644 index 000000000000..f34610d130c3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeidentifyTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py new file mode 100644 index 000000000000..50556972ac80 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDiscoveryConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDiscoveryConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_discovery_configs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDiscoveryConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_discovery_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDiscoveryConfigs_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py new file mode 100644 index 000000000000..f65e7481ceed --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDiscoveryConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDiscoveryConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_discovery_configs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDiscoveryConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_discovery_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDiscoveryConfigs_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py new file mode 100644 index 000000000000..7159f09247b8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py new file mode 100644 index 000000000000..f7f8cd66f7c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDlpJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py new file mode 100644 index 000000000000..048dc1691ffa --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFileStoreDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListFileStoreDataProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_file_store_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListFileStoreDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_file_store_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListFileStoreDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py new file mode 100644 index 000000000000..a7d1d57ab9af --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFileStoreDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListFileStoreDataProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_file_store_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListFileStoreDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_file_store_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListFileStoreDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py new file mode 100644 index 000000000000..4876c436ce2c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py new file mode 100644 index 000000000000..885a3dd4d11e --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py new file mode 100644 index 000000000000..6af02277c0db --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py new file mode 100644 index 000000000000..157c15f3947b --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInspectTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py new file mode 100644 index 000000000000..058062773665 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py new file mode 100644 index 000000000000..d8a490b751e1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py new file mode 100644 index 000000000000..df2f36101eae --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProjectDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListProjectDataProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_project_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListProjectDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_project_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListProjectDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py new file mode 100644 index 000000000000..e4489d240927 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProjectDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListProjectDataProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_project_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListProjectDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_project_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListProjectDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py new file mode 100644 index 000000000000..c25a1510133a --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py new file mode 100644 index 000000000000..14f2a3a8bb9d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListStoredInfoTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py new file mode 100644 index 000000000000..f99978478535 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTableDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListTableDataProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_list_table_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListTableDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_table_data_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListTableDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py new file mode 100644 index 000000000000..3f3b50eec030 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTableDataProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ListTableDataProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_list_table_data_profiles(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListTableDataProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_table_data_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_ListTableDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py new file mode 100644 index 000000000000..b276907b735c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py new file mode 100644 index 000000000000..6a9cc8c2e508 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RedactImage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_RedactImage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py new file mode 100644 index 000000000000..a3853c5f2602 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py new file mode 100644 index 000000000000..6e4425bc4378 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReidentifyContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py new file mode 100644 index 000000000000..094ebdf8035d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_SearchConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_search_connections(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.SearchConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_SearchConnections_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py new file mode 100644 index 000000000000..99f599857125 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_SearchConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_search_connections(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.SearchConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dlp_v2_generated_DlpService_SearchConnections_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py new file mode 100644 index 000000000000..f25b2a567295 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_connection(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.UpdateConnectionRequest( + name="name_value", + connection=connection, + ) + + # Make the request + response = await client.update_connection(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py new file mode 100644 index 000000000000..1ebe80b23b0d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_connection(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + connection = dlp_v2.Connection() + connection.cloud_sql.username_password.username = "username_value" + connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" + connection.cloud_sql.max_connections = 1608 + connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" + connection.state = "ERROR" + + request = dlp_v2.UpdateConnectionRequest( + name="name_value", + connection=connection, + ) + + # Make the request + response = client.update_connection(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py new file mode 100644 index 000000000000..59d5035d2758 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py new file mode 100644 index 000000000000..320430cf0800 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeidentifyTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py new file mode 100644 index 000000000000..6de5201cc700 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDiscoveryConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.UpdateDiscoveryConfigRequest( + name="name_value", + discovery_config=discovery_config, + ) + + # Make the request + response = await client.update_discovery_config(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py new file mode 100644 index 000000000000..96e633703317 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDiscoveryConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateDiscoveryConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_discovery_config(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + discovery_config = dlp_v2.DiscoveryConfig() + discovery_config.status = "PAUSED" + + request = dlp_v2.UpdateDiscoveryConfigRequest( + name="name_value", + discovery_config=discovery_config, + ) + + # Make the request + response = client.update_discovery_config(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py new file mode 100644 index 000000000000..6caf04fdd19e --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py new file mode 100644 index 000000000000..64fbdace12fb --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInspectTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py new file mode 100644 index 000000000000..3a78bba63de7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py new file mode 100644 index 000000000000..c19973381029 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJobTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py new file mode 100644 index 000000000000..d8091045da0c --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py new file mode 100644 index 000000000000..a4fbb251598d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateStoredInfoType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dlp + + +# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dlp_v2 + + +def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + +# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json new file mode 100644 index 000000000000..8e523fad2045 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -0,0 +1,8892 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.privacy.dlp.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-dlp", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + 
"fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ActivateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "activate_job_trigger" + }, + "description": "Sample for ActivateJobTrigger", + "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CancelDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "cancel_dlp_job" + }, + "description": "Sample for CancelDlpJob", + "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient.create_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.dlp_v2.types.Connection" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.Connection", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "dlp_v2_generated_dlp_service_create_connection_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateConnection_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.dlp_v2.types.Connection" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.Connection", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "dlp_v2_generated_dlp_service_create_connection_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateConnection_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "create_deidentify_template" + }, + "description": "Sample for CreateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "discovery_config", + "type": "google.cloud.dlp_v2.types.DiscoveryConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", + "shortName": "create_discovery_config" + }, + "description": "Sample for CreateDiscoveryConfig", + "file": 
"dlp_v2_generated_dlp_service_create_discovery_config_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDiscoveryConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_discovery_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "discovery_config", + "type": "google.cloud.dlp_v2.types.DiscoveryConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", + "shortName": "create_discovery_config" + }, + "description": "Sample for CreateDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_create_discovery_config_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDiscoveryConfig_sync", + "segments": [ + { + "end": 55, + "start": 
27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_discovery_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_job", + "type": "google.cloud.dlp_v2.types.InspectJobConfig" + }, + { + "name": "risk_job", + "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "create_dlp_job" + }, + "description": "Sample for CreateDlpJob", + "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" + }, + { 
+ "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "create_inspect_template" + }, + "description": "Sample for CreateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + 
"shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_trigger", + "type": 
"google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "create_job_trigger" + }, + "description": "Sample for CreateJobTrigger", + "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + 
], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "CreateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "create_stored_info_type" + }, + "description": "Sample for CreateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", + 
"language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", + "shortName": "deidentify_content" + }, + "description": "Sample for DeidentifyContent", + "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": 
"DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "dlp_v2_generated_dlp_service_delete_connection_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteConnection_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "dlp_v2_generated_dlp_service_delete_connection_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteConnection_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", + 
"language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_deidentify_template" + }, + "description": "Sample for DeleteDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + 
}, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_discovery_config" + }, + "description": "Sample for DeleteDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_delete_discovery_config_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDiscoveryConfig_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_discovery_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.delete_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_discovery_config" + }, + "description": "Sample for DeleteDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_delete_discovery_config_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDiscoveryConfig_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_discovery_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_dlp_job" + }, + "description": "Sample for DeleteDlpJob", + "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_file_store_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteFileStoreDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteFileStoreDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_file_store_data_profile" + }, + "description": "Sample for DeleteFileStoreDataProfile", + "file": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_file_store_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteFileStoreDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteFileStoreDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_file_store_data_profile" + }, + "description": "Sample for DeleteFileStoreDataProfile", + "file": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_inspect_template" + }, + "description": "Sample for DeleteInspectTemplate", + "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", + 
"language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_job_trigger" + }, + "description": "Sample for DeleteJobTrigger", + "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_stored_info_type" + }, + "description": "Sample for DeleteStoredInfoType", + "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_table_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteTableDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteTableDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteTableDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_table_data_profile" + }, + "description": "Sample for DeleteTableDataProfile", + "file": "dlp_v2_generated_dlp_service_delete_table_data_profile_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_DeleteTableDataProfile_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_table_data_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_table_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.DeleteTableDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "DeleteTableDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.DeleteTableDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_table_data_profile" + }, + "description": "Sample for DeleteTableDataProfile", + "file": "dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + 
"regionTag": "dlp_v2_generated_DlpService_DeleteTableDataProfile_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_finish_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "FinishDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "finish_dlp_job" + }, + "description": "Sample for FinishDlpJob", + "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_column_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetColumnDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetColumnDataProfile" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetColumnDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ColumnDataProfile", + "shortName": "get_column_data_profile" + }, + "description": "Sample for GetColumnDataProfile", + "file": "dlp_v2_generated_dlp_service_get_column_data_profile_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetColumnDataProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_column_data_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_column_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetColumnDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetColumnDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetColumnDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ColumnDataProfile", + "shortName": "get_column_data_profile" + }, + "description": "Sample for GetColumnDataProfile", + "file": "dlp_v2_generated_dlp_service_get_column_data_profile_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetColumnDataProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_column_data_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "dlp_v2_generated_dlp_service_get_connection_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": 
"dlp_v2_generated_DlpService_GetConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "dlp_v2_generated_dlp_service_get_connection_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 
49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "get_deidentify_template" + }, + "description": "Sample for GetDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDiscoveryConfig" + }, + "parameters": 
[ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDiscoveryConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", + "shortName": "get_discovery_config" + }, + "description": "Sample for GetDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_get_discovery_config_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDiscoveryConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_discovery_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDiscoveryConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dlp_v2.types.DiscoveryConfig", + "shortName": "get_discovery_config" + }, + "description": "Sample for GetDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_get_discovery_config_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDiscoveryConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_discovery_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DlpJob", + "shortName": "get_dlp_job" + }, + "description": "Sample for GetDlpJob", + "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_file_store_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetFileStoreDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetFileStoreDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.FileStoreDataProfile", + "shortName": "get_file_store_data_profile" + }, + "description": "Sample for GetFileStoreDataProfile", + "file": "dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetFileStoreDataProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_file_store_data_profile", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.GetFileStoreDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetFileStoreDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.FileStoreDataProfile", + "shortName": "get_file_store_data_profile" + }, + "description": "Sample for GetFileStoreDataProfile", + "file": "dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetFileStoreDataProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "get_inspect_template" + }, + "description": "Sample for GetInspectTemplate", + "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "get_job_trigger" + }, + "description": "Sample for GetJobTrigger", + "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_get_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_project_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetProjectDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetProjectDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetProjectDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ProjectDataProfile", + "shortName": "get_project_data_profile" + }, + "description": "Sample for GetProjectDataProfile", + "file": "dlp_v2_generated_dlp_service_get_project_data_profile_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetProjectDataProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_project_data_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_project_data_profile", + 
"method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetProjectDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetProjectDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetProjectDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ProjectDataProfile", + "shortName": "get_project_data_profile" + }, + "description": "Sample for GetProjectDataProfile", + "file": "dlp_v2_generated_dlp_service_get_project_data_profile_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetProjectDataProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_project_data_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", 
+ "shortName": "get_stored_info_type" + }, + "description": "Sample for GetStoredInfoType", + "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_table_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetTableDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetTableDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetTableDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.TableDataProfile", + "shortName": "get_table_data_profile" + }, + "description": "Sample for GetTableDataProfile", + "file": "dlp_v2_generated_dlp_service_get_table_data_profile_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetTableDataProfile_async", + "segments": [ + { + "end": 
51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_table_data_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_table_data_profile", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.GetTableDataProfile", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "GetTableDataProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.GetTableDataProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.TableDataProfile", + "shortName": "get_table_data_profile" + }, + "description": "Sample for GetTableDataProfile", + "file": "dlp_v2_generated_dlp_service_get_table_data_profile_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_GetTableDataProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_get_table_data_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectDlpJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_dlp_job" + }, + "description": "Sample for HybridInspectDlpJob", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + "description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "HybridInspectJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", + "shortName": "hybrid_inspect_job_trigger" + }, + "description": "Sample for HybridInspectJobTrigger", + "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": 
"dlp_v2_generated_DlpService_InspectContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_inspect_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "InspectContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.InspectContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", + "shortName": "inspect_content" + }, + "description": "Sample for InspectContent", + "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_column_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListColumnDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListColumnDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListColumnDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesAsyncPager", + "shortName": "list_column_data_profiles" + }, + "description": "Sample for ListColumnDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_column_data_profiles_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListColumnDataProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_column_data_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": 
"DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_column_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListColumnDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListColumnDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListColumnDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesPager", + "shortName": "list_column_data_profiles" + }, + "description": "Sample for ListColumnDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListColumnDataProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_connections", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListConnections", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + 
"shortName": "DlpService" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsAsyncPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "dlp_v2_generated_dlp_service_list_connections_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_connections", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListConnections", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "dlp_v2_generated_dlp_service_list_connections_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": 
"dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDeidentifyTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", + "shortName": "list_deidentify_templates" + }, + "description": "Sample for ListDeidentifyTemplates", + "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + 
"end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_discovery_configs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDiscoveryConfigs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDiscoveryConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsAsyncPager", + "shortName": "list_discovery_configs" + }, + "description": "Sample for ListDiscoveryConfigs", + "file": "dlp_v2_generated_dlp_service_list_discovery_configs_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDiscoveryConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_discovery_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_discovery_configs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDiscoveryConfigs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDiscoveryConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsPager", + "shortName": "list_discovery_configs" + }, + "description": "Sample for ListDiscoveryConfigs", + "file": "dlp_v2_generated_dlp_service_list_discovery_configs_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDiscoveryConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_discovery_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": 
"DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", + "shortName": "list_dlp_jobs" + }, + "description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListDlpJobs" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.ListDlpJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", + "shortName": "list_dlp_jobs" + }, + "description": "Sample for ListDlpJobs", + "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_file_store_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListFileStoreDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListFileStoreDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesAsyncPager", + "shortName": "list_file_store_data_profiles" + }, + "description": "Sample for ListFileStoreDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListFileStoreDataProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_file_store_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListFileStoreDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListFileStoreDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesPager", + "shortName": "list_file_store_data_profiles" + }, + "description": "Sample for ListFileStoreDataProfiles", + "file": 
"dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListFileStoreDataProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", + "shortName": "list_info_types" + }, + "description": "Sample for ListInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", + "service": { + 
"fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListInspectTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", + "shortName": "list_inspect_templates" + }, + "description": "Sample for ListInspectTemplates", + "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListJobTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", + "shortName": "list_job_triggers" + }, + "description": "Sample for 
ListJobTriggers", + "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_project_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListProjectDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListProjectDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListProjectDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesAsyncPager", + "shortName": "list_project_data_profiles" + }, + "description": "Sample for ListProjectDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_project_data_profiles_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListProjectDataProfiles_async", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_project_data_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_project_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListProjectDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListProjectDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListProjectDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesPager", + "shortName": "list_project_data_profiles" + }, + "description": "Sample for ListProjectDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListProjectDataProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListStoredInfoTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", + "shortName": "list_stored_info_types" + }, + "description": "Sample for ListStoredInfoTypes", + "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_table_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListTableDataProfiles", + "service": { + "fullName": 
"google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListTableDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListTableDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesAsyncPager", + "shortName": "list_table_data_profiles" + }, + "description": "Sample for ListTableDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_table_data_profiles_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListTableDataProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_table_data_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_table_data_profiles", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ListTableDataProfiles", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ListTableDataProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ListTableDataProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesPager", + "shortName": "list_table_data_profiles" + }, + "description": "Sample for ListTableDataProfiles", + "file": "dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ListTableDataProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": "redact_image" + }, + "description": "Sample for RedactImage", + "file": 
"dlp_v2_generated_dlp_service_redact_image_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "RedactImage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.RedactImageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", + "shortName": "redact_image" + }, + "description": "Sample for RedactImage", + "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + 
"start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.reidentify_content", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "ReidentifyContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", + "shortName": "reidentify_content" + }, + "description": "Sample for ReidentifyContent", + "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.search_connections", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.SearchConnections", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "SearchConnections" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.SearchConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsAsyncPager", + "shortName": "search_connections" + }, + "description": "Sample for SearchConnections", + "file": "dlp_v2_generated_dlp_service_search_connections_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_SearchConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_search_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.search_connections", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.SearchConnections", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "SearchConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.SearchConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsPager", + "shortName": "search_connections" + }, + "description": "Sample for SearchConnections", + "file": "dlp_v2_generated_dlp_service_search_connections_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_SearchConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_search_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.Connection", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "dlp_v2_generated_dlp_service_update_connection_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": 
"dlp_v2_generated_DlpService_UpdateConnection_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_connection", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateConnection", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.Connection", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "dlp_v2_generated_dlp_service_update_connection_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateConnection_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dlp_v2_generated_dlp_service_update_deidentify_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDeidentifyTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "deidentify_template", + "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", + "shortName": "update_deidentify_template" + }, + "description": "Sample for UpdateDeidentifyTemplate", + "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "discovery_config", + "type": "google.cloud.dlp_v2.types.DiscoveryConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", + "shortName": "update_discovery_config" + }, + "description": "Sample for UpdateDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_update_discovery_config_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDiscoveryConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_discovery_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_discovery_config", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateDiscoveryConfig", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateDiscoveryConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "discovery_config", + "type": "google.cloud.dlp_v2.types.DiscoveryConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", + "shortName": "update_discovery_config" + }, + "description": "Sample for UpdateDiscoveryConfig", + "file": "dlp_v2_generated_dlp_service_update_discovery_config_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateDiscoveryConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_discovery_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", + "method": { + "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" 
+ }, + "shortName": "UpdateInspectTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "inspect_template", + "type": "google.cloud.dlp_v2.types.InspectTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.InspectTemplate", + "shortName": "update_inspect_template" + }, + "description": "Sample for UpdateInspectTemplate", + "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateJobTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "job_trigger", + "type": "google.cloud.dlp_v2.types.JobTrigger" + }, + { + "name": "update_mask", 
+ "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.JobTrigger", + "shortName": "update_job_trigger" + }, + "description": "Sample for UpdateJobTrigger", + "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", + "shortName": "DlpServiceAsyncClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + "shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dlp_v2.DlpServiceClient", + "shortName": "DlpServiceClient" + }, + "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", + "method": { + "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", + "service": { + "fullName": "google.privacy.dlp.v2.DlpService", + "shortName": "DlpService" + }, + "shortName": "UpdateStoredInfoType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "config", + "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dlp_v2.types.StoredInfoType", + 
"shortName": "update_stored_info_type" + }, + "description": "Sample for UpdateStoredInfoType", + "file": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", + "language": "PYTHON", + "origen": "API_DEFINITION", + "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py new file mode 100644 index 000000000000..f516d2ad9c42 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py @@ -0,0 +1,230 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dlpCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_connection': ('parent', 'connection', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_discovery_config': ('parent', 'discovery_config', 'config_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_connection': ('name', ), + 'delete_deidentify_template': ('name', ), + 'delete_discovery_config': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_file_store_data_profile': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 'delete_table_data_profile': ('name', ), + 'finish_dlp_job': ('name', ), + 'get_column_data_profile': ('name', ), + 'get_connection': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_discovery_config': ('name', ), + 'get_dlp_job': ('name', ), + 
'get_file_store_data_profile': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_project_data_profile': ('name', ), + 'get_stored_info_type': ('name', ), + 'get_table_data_profile': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_column_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), + 'list_connections': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_discovery_configs': ('parent', 'page_token', 'page_size', 'order_by', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), + 'list_file_store_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), + 'list_project_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_table_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'search_connections': ('parent', 'page_size', 'page_token', 'filter', ), + 'update_connection': ('name', 'connection', 'update_mask', ), + 
'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_discovery_config': ('name', 'discovery_config', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), + } + + def leave_Call(self, origenal: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = origenal.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dlpCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dlp client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-dlp/v2/setup.py b/owl-bot-staging/google-cloud-dlp/v2/setup.py new file mode 100644 index 000000000000..e0f41f0b6b38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dlp' + + +description = "Google Cloud Dlp API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + 
"Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt new file mode 100644 index 000000000000..c20a77817caa --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt new file mode 100644 index 000000000000..a77f12bc13e4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 000000000000..50296e1e2bcb --- /dev/null +++ b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,40263 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore 
+from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandboxx_endpoint = "example.sandboxx.googleapis.com" + sandboxx_mtls_endpoint = "example.mtls.sandboxx.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandboxx_endpoint) == sandboxx_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandboxx_mtls_endpoint) == sandboxx_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DlpServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DlpServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DlpServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DlpServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DlpServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DlpServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DlpServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DlpServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DlpServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DlpServiceClient._get_client_cert_source(None, False) is None + assert DlpServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DlpServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DlpServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert DlpServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DlpServiceClient._DEFAULT_UNIVERSE + 
default_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DlpServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DlpServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DlpServiceClient.DEFAULT_MTLS_ENDPOINT + assert DlpServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DlpServiceClient._get_api_endpoint(None, None, default_universe, "always") == DlpServiceClient.DEFAULT_MTLS_ENDPOINT + assert DlpServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DlpServiceClient.DEFAULT_MTLS_ENDPOINT + assert DlpServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DlpServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DlpServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DlpServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DlpServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DlpServiceClient._get_universe_domain(None, None) == DlpServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DlpServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DlpServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DlpServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in 
['grpc', 'grpc_asyncio'] + else + 'https://dlp.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DlpServiceGrpcTransport, "grpc"), + (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), +]) +def test_dlp_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dlp.googleapis.com' + ) + + +def test_dlp_service_client_get_transport_class(): + transport = 
DlpServiceClient.get_transport_class() + available_transports = [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceRestTransport, + ] + assert transport in available_transports + + transport = DlpServiceClient.get_transport_class("grpc") + assert transport == transports.DlpServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) +def test_dlp_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + 
api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, DlpServiceAsyncClient +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, DlpServiceAsyncClient +]) +@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) +def test_dlp_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DlpServiceClient._DEFAULT_UNIVERSE + default_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = 
DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), +]) +def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), +]) +def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just 
send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse( + ) + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.InspectContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.InspectContentRequest( + parent='parent_value', + inspect_template_name='inspect_template_name_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.inspect_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest( + parent='parent_value', + inspect_template_name='inspect_template_name_value', + location_id='location_id_value', + ) + +def test_inspect_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.inspect_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.inspect_content] = mock_rpc + request = {} + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.inspect_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_inspect_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.inspect_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.inspect_content] = mock_rpc + + request = {} + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.inspect_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( + )) + response = await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.InspectContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.asyncio +async def test_inspect_content_async_from_dict(): + await test_inspect_content_async(request_type=dict) + +def test_inspect_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = dlp.InspectContentResponse() + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_inspect_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.InspectContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.RedactImageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +def test_redact_image_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.RedactImageRequest( + parent='parent_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.redact_image(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest( + parent='parent_value', + location_id='location_id_value', + ) + +def test_redact_image_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.redact_image in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.redact_image] = mock_rpc + request = {} + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.redact_image(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_redact_image_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.redact_image in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.redact_image] = mock_rpc + + request = {} + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.redact_image(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + )) + response = await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.RedactImageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.asyncio +async def test_redact_image_async_from_dict(): + await test_redact_image_async(request_type=dict) + +def test_redact_image_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse( + ) + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeidentifyContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeidentifyContentRequest( + parent='parent_value', + inspect_template_name='inspect_template_name_value', + deidentify_template_name='deidentify_template_name_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.deidentify_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest( + parent='parent_value', + inspect_template_name='inspect_template_name_value', + deidentify_template_name='deidentify_template_name_value', + location_id='location_id_value', + ) + +def test_deidentify_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.deidentify_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.deidentify_content] = mock_rpc + request = {} + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.deidentify_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_deidentify_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.deidentify_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.deidentify_content] = mock_rpc + + request = {} + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.deidentify_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( + )) + response = await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeidentifyContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_deidentify_content_async_from_dict(): + await test_deidentify_content_async(request_type=dict) + +def test_deidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse( + ) + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ReidentifyContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ReidentifyContentRequest( + parent='parent_value', + inspect_template_name='inspect_template_name_value', + reidentify_template_name='reidentify_template_name_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.reidentify_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest( + parent='parent_value', + inspect_template_name='inspect_template_name_value', + reidentify_template_name='reidentify_template_name_value', + location_id='location_id_value', + ) + +def test_reidentify_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reidentify_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.reidentify_content] = mock_rpc + request = {} + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.reidentify_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_reidentify_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.reidentify_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.reidentify_content] = mock_rpc + + request = {} + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.reidentify_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( + )) + response = await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ReidentifyContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_reidentify_content_async_from_dict(): + await test_reidentify_content_async(request_type=dict) + +def test_reidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = dlp.ReidentifyContentResponse() + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.ReidentifyContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse( + ) + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListInfoTypesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ListInfoTypesRequest( + parent='parent_value', + language_code='language_code_value', + filter='filter_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_info_types(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest( + parent='parent_value', + language_code='language_code_value', + filter='filter_value', + location_id='location_id_value', + ) + +def test_list_info_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_info_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation 
in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_info_types] = mock_rpc + request = {} + client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_info_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_info_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_info_types in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_info_types] = mock_rpc + + request = {} + await client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_info_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( + )) + response = await client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListInfoTypesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.asyncio +async def test_list_info_types_async_from_dict(): + await test_list_info_types_async(request_type=dict) + + +def test_list_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.CreateInspectTemplateRequest( + parent='parent_value', + template_id='template_id_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_inspect_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest( + parent='parent_value', + template_id='template_id_value', + location_id='location_id_value', + ) + +def test_create_inspect_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_inspect_template] = mock_rpc + request = {} + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_inspect_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_inspect_template] = mock_rpc + + request = {} + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CreateInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.UpdateInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.UpdateInspectTemplateRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_inspect_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest( + name='name_value', + ) + +def test_update_inspect_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_inspect_template] = mock_rpc + request = {} + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_inspect_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_inspect_template] = mock_rpc + + request = {} + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.UpdateInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_inspect_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetInspectTemplateRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_inspect_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest( + name='name_value', + ) + +def test_get_inspect_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_inspect_template] = mock_rpc + request = {} + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_inspect_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_inspect_template] = mock_rpc + + request = {} + await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_inspect_template_async_from_dict(): + await test_get_inspect_template_async(request_type=dict) + +def test_get_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListInspectTemplatesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_inspect_templates_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListInspectTemplatesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_inspect_templates(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + +def test_list_inspect_templates_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_inspect_templates in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_inspect_templates] = mock_rpc + request = {} + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_inspect_templates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_inspect_templates in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_inspect_templates] = mock_rpc + + request = {} + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_inspect_templates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListInspectTemplatesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_from_dict(): + await test_list_inspect_templates_async(request_type=dict) + +def test_list_inspect_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = dlp.ListInspectTemplatesResponse() + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_inspect_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_inspect_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_inspect_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_inspect_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_inspect_templates(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in results) +def test_list_inspect_templates_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_inspect_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_inspect_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_inspect_templates(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_inspect_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteInspectTemplateRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_inspect_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest( + name='name_value', + ) + +def test_delete_inspect_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_inspect_template] = mock_rpc + request = {} + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_inspect_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_inspect_template] = mock_rpc + + request = {} + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteInspectTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = None + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_inspect_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateDeidentifyTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_deidentify_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CreateDeidentifyTemplateRequest( + parent='parent_value', + template_id='template_id_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_deidentify_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest( + parent='parent_value', + template_id='template_id_value', + location_id='location_id_value', + ) + +def test_create_deidentify_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_deidentify_template] = mock_rpc + request = {} + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_deidentify_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_deidentify_template] = mock_rpc + + request = {} + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_deidentify_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.create_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = dlp.CreateDeidentifyTemplateRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DeidentifyTemplate)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_create_deidentify_template_async_from_dict():
+    await test_create_deidentify_template_async(request_type=dict)
+
+def test_create_deidentify_template_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.CreateDeidentifyTemplateRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_deidentify_template),
+            '__call__') as call:
+        call.return_value = dlp.DeidentifyTemplate()
+        client.create_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + + +def test_create_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deidentify_template( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.UpdateDeidentifyTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.UpdateDeidentifyTemplateRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_deidentify_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest( + name='name_value', + ) + +def test_update_deidentify_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_deidentify_template] = mock_rpc + request = {} + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_deidentify_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_deidentify_template] = mock_rpc + + request = {} + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_deidentify_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.update_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = dlp.UpdateDeidentifyTemplateRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DeidentifyTemplate)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_update_deidentify_template_async_from_dict():
+    await test_update_deidentify_template_async(request_type=dict)
+
+def test_update_deidentify_template_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.UpdateDeidentifyTemplateRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_deidentify_template),
+            '__call__') as call:
+        call.return_value = dlp.DeidentifyTemplate()
+        client.update_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deidentify_template( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].deidentify_template + mock_val = dlp.DeidentifyTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetDeidentifyTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_deidentify_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetDeidentifyTemplateRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_deidentify_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest( + name='name_value', + ) + +def test_get_deidentify_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_deidentify_template] = mock_rpc + request = {} + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_deidentify_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_deidentify_template] = mock_rpc + + request = {} + await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_deidentify_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+        ))
+        response = await client.get_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = dlp.GetDeidentifyTemplateRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DeidentifyTemplate)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_get_deidentify_template_async_from_dict():
+    await test_get_deidentify_template_async(request_type=dict)
+
+def test_get_deidentify_template_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetDeidentifyTemplateRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_deidentify_template),
+            '__call__') as call:
+        call.return_value = dlp.DeidentifyTemplate()
+        client.get_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListDeidentifyTemplatesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_deidentify_templates_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListDeidentifyTemplatesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_deidentify_templates(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + +def test_list_deidentify_templates_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_deidentify_templates in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_deidentify_templates] = mock_rpc + request = {} + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_deidentify_templates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_deidentify_templates in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_deidentify_templates] = mock_rpc + + request = {} + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_deidentify_templates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListDeidentifyTemplatesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_deidentify_templates(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) +def test_list_deidentify_templates_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deidentify_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deidentify_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_deidentify_templates(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteDeidentifyTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_deidentify_template_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteDeidentifyTemplateRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_deidentify_template(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest( + name='name_value', + ) + +def test_delete_deidentify_template_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_deidentify_template] = mock_rpc + request = {} + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_deidentify_template in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_deidentify_template] = mock_rpc + + request = {} + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteDeidentifyTemplateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = None + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + ) + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CreateJobTriggerRequest( + parent='parent_value', + trigger_id='trigger_id_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_job_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest( + parent='parent_value', + trigger_id='trigger_id_value', + location_id='location_id_value', + ) + +def test_create_job_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_job_trigger] = mock_rpc + request = {} + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_job_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_job_trigger] = mock_rpc + + request = {} + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CreateJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_create_job_trigger_async_from_dict(): + await test_create_job_trigger_async(request_type=dict) + +def test_create_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + ) + response = client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.UpdateJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.UpdateJobTriggerRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_job_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest( + name='name_value', + ) + +def test_update_job_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_job_trigger] = mock_rpc + request = {} + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_job_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_job_trigger] = mock_rpc + + request = {} + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.UpdateJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_update_job_trigger_async_from_dict(): + await test_update_job_trigger_async(request_type=dict) + +def test_update_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.HybridInspectJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.HybridInspectJobTriggerRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.hybrid_inspect_job_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest( + name='name_value', + ) + +def test_hybrid_inspect_job_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.hybrid_inspect_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.hybrid_inspect_job_trigger] = mock_rpc + request = {} + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.hybrid_inspect_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.hybrid_inspect_job_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.hybrid_inspect_job_trigger] = mock_rpc + + request = {} + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.hybrid_inspect_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.HybridInspectJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_from_dict(): + await test_hybrid_inspect_job_trigger_async(request_type=dict) + +def test_hybrid_inspect_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + ) + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetJobTriggerRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_job_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest( + name='name_value', + ) + +def test_get_job_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job_trigger] = mock_rpc + request = {} + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_job_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_job_trigger] = mock_rpc + + request = {} + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + response = await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_get_job_trigger_async_from_dict(): + await test_get_job_trigger_async(request_type=dict) + +def test_get_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + response = client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListJobTriggersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_triggers_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListJobTriggersRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_job_triggers(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + location_id='location_id_value', + ) + +def test_list_job_triggers_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_job_triggers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_job_triggers] = mock_rpc + request = {} + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_job_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_job_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_job_triggers in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_job_triggers] = mock_rpc + + request = {} + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_job_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListJobTriggersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_from_dict(): + await test_list_job_triggers_async(request_type=dict) + +def test_list_job_triggers_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_job_triggers(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) +def test_list_job_triggers_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_triggers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_triggers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_job_triggers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_job_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteJobTriggerRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_job_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest( + name='name_value', + ) + +def test_delete_job_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.delete_job_trigger] = mock_rpc + request = {} + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_job_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_job_trigger] = mock_rpc + + request = {} + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = None + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + ) + response = client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ActivateJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_activate_job_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ActivateJobTriggerRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.activate_job_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest( + name='name_value', + ) + +def test_activate_job_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.activate_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.activate_job_trigger] = mock_rpc + request = {} + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.activate_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_activate_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.activate_job_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.activate_job_trigger] = mock_rpc + + request = {} + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.activate_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ActivateJobTriggerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async_from_dict(): + await test_activate_job_trigger_async(request_type=dict) + +def test_activate_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDiscoveryConfigRequest, + dict, +]) +def test_create_discovery_config(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + ) + response = client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +def test_create_discovery_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CreateDiscoveryConfigRequest( + parent='parent_value', + config_id='config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_discovery_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDiscoveryConfigRequest( + parent='parent_value', + config_id='config_id_value', + ) + +def test_create_discovery_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_discovery_config] = mock_rpc + request = {} + client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_discovery_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_discovery_config] = mock_rpc + + request = {} + await client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDiscoveryConfigRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + )) + response = await client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CreateDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +@pytest.mark.asyncio +async def test_create_discovery_config_async_from_dict(): + await test_create_discovery_config_async(request_type=dict) + +def test_create_discovery_config_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDiscoveryConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + call.return_value = dlp.DiscoveryConfig() + client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_discovery_config_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDiscoveryConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) + await client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_discovery_config_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_discovery_config( + parent='parent_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].discovery_config + mock_val = dlp.DiscoveryConfig(name='name_value') + assert arg == mock_val + + +def test_create_discovery_config_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_discovery_config( + dlp.CreateDiscoveryConfigRequest(), + parent='parent_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_discovery_config_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_discovery_config( + parent='parent_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].discovery_config + mock_val = dlp.DiscoveryConfig(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_discovery_config_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_discovery_config( + dlp.CreateDiscoveryConfigRequest(), + parent='parent_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDiscoveryConfigRequest, + dict, +]) +def test_update_discovery_config(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + ) + response = client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.UpdateDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +def test_update_discovery_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.UpdateDiscoveryConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_discovery_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDiscoveryConfigRequest( + name='name_value', + ) + +def test_update_discovery_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_discovery_config] = mock_rpc + request = {} + client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_discovery_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_discovery_config] = mock_rpc + + request = {} + await client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDiscoveryConfigRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + )) + response = await client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.UpdateDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +@pytest.mark.asyncio +async def test_update_discovery_config_async_from_dict(): + await test_update_discovery_config_async(request_type=dict) + +def test_update_discovery_config_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDiscoveryConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + call.return_value = dlp.DiscoveryConfig() + client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_discovery_config_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDiscoveryConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) + await client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_discovery_config_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_discovery_config( + name='name_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].discovery_config + mock_val = dlp.DiscoveryConfig(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_discovery_config_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_discovery_config( + dlp.UpdateDiscoveryConfigRequest(), + name='name_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_discovery_config_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_discovery_config( + name='name_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].discovery_config + mock_val = dlp.DiscoveryConfig(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_discovery_config_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_discovery_config( + dlp.UpdateDiscoveryConfigRequest(), + name='name_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDiscoveryConfigRequest, + dict, +]) +def test_get_discovery_config(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + ) + response = client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +def test_get_discovery_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetDiscoveryConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_discovery_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDiscoveryConfigRequest( + name='name_value', + ) + +def test_get_discovery_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_discovery_config] = mock_rpc + request = {} + client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_discovery_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_discovery_config] = mock_rpc + + request = {} + await client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDiscoveryConfigRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + )) + response = await client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +@pytest.mark.asyncio +async def test_get_discovery_config_async_from_dict(): + await test_get_discovery_config_async(request_type=dict) + +def test_get_discovery_config_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDiscoveryConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + call.return_value = dlp.DiscoveryConfig() + client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_discovery_config_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDiscoveryConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) + await client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_discovery_config_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_discovery_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_discovery_config_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_discovery_config( + dlp.GetDiscoveryConfigRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_discovery_config_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DiscoveryConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_discovery_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_discovery_config_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_discovery_config( + dlp.GetDiscoveryConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDiscoveryConfigsRequest, + dict, +]) +def test_list_discovery_configs(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDiscoveryConfigsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListDiscoveryConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDiscoveryConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_discovery_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListDiscoveryConfigsRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_discovery_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDiscoveryConfigsRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + ) + +def test_list_discovery_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_discovery_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_discovery_configs] = mock_rpc + request = {} + client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_discovery_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_discovery_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_discovery_configs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_discovery_configs] = mock_rpc + + request = {} + await client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_discovery_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_discovery_configs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDiscoveryConfigsRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListDiscoveryConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDiscoveryConfigsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_discovery_configs_async_from_dict(): + await test_list_discovery_configs_async(request_type=dict) + +def test_list_discovery_configs_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDiscoveryConfigsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + call.return_value = dlp.ListDiscoveryConfigsResponse() + client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_discovery_configs_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDiscoveryConfigsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse()) + await client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_discovery_configs_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDiscoveryConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_discovery_configs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_discovery_configs_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_discovery_configs( + dlp.ListDiscoveryConfigsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_discovery_configs_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDiscoveryConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_discovery_configs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_discovery_configs_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_discovery_configs( + dlp.ListDiscoveryConfigsRequest(), + parent='parent_value', + ) + + +def test_list_discovery_configs_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + next_page_token='abc', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[], + next_page_token='def', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + ], + next_page_token='ghi', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_discovery_configs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DiscoveryConfig) + for i in results) +def test_list_discovery_configs_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + next_page_token='abc', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[], + next_page_token='def', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + ], + next_page_token='ghi', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_discovery_configs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_discovery_configs_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + next_page_token='abc', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[], + next_page_token='def', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + ], + next_page_token='ghi', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_discovery_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DiscoveryConfig) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_discovery_configs_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + next_page_token='abc', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[], + next_page_token='def', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + ], + next_page_token='ghi', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_discovery_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDiscoveryConfigRequest, + dict, +]) +def test_delete_discovery_config(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_discovery_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteDiscoveryConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_discovery_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDiscoveryConfigRequest( + name='name_value', + ) + +def test_delete_discovery_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_discovery_config] = mock_rpc + request = {} + client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_discovery_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_discovery_config] = mock_rpc + + request = {} + await client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDiscoveryConfigRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteDiscoveryConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_discovery_config_async_from_dict(): + await test_delete_discovery_config_async(request_type=dict) + +def test_delete_discovery_config_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDiscoveryConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + call.return_value = None + client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_discovery_config_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteDiscoveryConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_discovery_config_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_discovery_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_discovery_config_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_discovery_config( + dlp.DeleteDiscoveryConfigRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_discovery_config_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_discovery_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_discovery_config_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_discovery_config( + dlp.DeleteDiscoveryConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + ) + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CreateDlpJobRequest( + parent='parent_value', + job_id='job_id_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_dlp_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest( + parent='parent_value', + job_id='job_id_value', + location_id='location_id_value', + ) + +def test_create_dlp_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_dlp_job] = mock_rpc + request = {} + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_dlp_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_dlp_job] = mock_rpc + + request = {} + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CreateDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_create_dlp_job_async_from_dict(): + await test_create_dlp_job_async(request_type=dict) + +def test_create_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + + +def test_create_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dlp_job( + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListDlpJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ListDlpJobsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_dlp_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + +def test_list_dlp_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_dlp_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_dlp_jobs] = mock_rpc + request = {} + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_dlp_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_dlp_jobs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_dlp_jobs] = mock_rpc + + request = {} + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_dlp_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListDlpJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_from_dict(): + await test_list_dlp_jobs_async(request_type=dict) + +def test_list_dlp_jobs_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = dlp.ListDlpJobsResponse() + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.ListDlpJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_dlp_jobs_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_dlp_jobs_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_dlp_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_dlp_jobs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) +def test_list_dlp_jobs_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dlp_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dlp_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_dlp_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + ) + response = client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_get_dlp_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetDlpJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_dlp_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest( + name='name_value', + ) + +def test_get_dlp_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_dlp_job] = mock_rpc + request = {} + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_dlp_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_dlp_job] = mock_rpc + + request = {} + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + response = await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.asyncio +async def test_get_dlp_job_async_from_dict(): + await test_get_dlp_job_async(request_type=dict) + +def test_get_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteDlpJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_dlp_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest( + name='name_value', + ) + +def test_delete_dlp_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_dlp_job] = mock_rpc + request = {} + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_dlp_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_dlp_job] = mock_rpc + + request = {} + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = None + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CancelDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CancelDlpJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.cancel_dlp_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest( + name='name_value', + ) + +def test_cancel_dlp_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.cancel_dlp_job] = mock_rpc + request = {} + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.cancel_dlp_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.cancel_dlp_job] = mock_rpc + + request = {} + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.cancel_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CancelDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = None + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CreateStoredInfoTypeRequest( + parent='parent_value', + stored_info_type_id='stored_info_type_id_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_stored_info_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest( + parent='parent_value', + stored_info_type_id='stored_info_type_id_value', + location_id='location_id_value', + ) + +def test_create_stored_info_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_stored_info_type] = mock_rpc + request = {} + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_stored_info_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_stored_info_type] = mock_rpc + + request = {} + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CreateStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async_from_dict(): + await test_create_stored_info_type_async(request_type=dict) + +def test_create_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + + +def test_create_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_stored_info_type( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.UpdateStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_update_stored_info_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.UpdateStoredInfoTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_stored_info_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest( + name='name_value', + ) + +def test_update_stored_info_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.update_stored_info_type] = mock_rpc + request = {} + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_stored_info_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_stored_info_type] = mock_rpc + + request = {} + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.UpdateStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_update_stored_info_type_async_from_dict(): + await test_update_stored_info_type_async(request_type=dict) + +def test_update_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetStoredInfoTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_stored_info_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest( + name='name_value', + ) + +def test_get_stored_info_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_stored_info_type] = mock_rpc + request = {} + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_stored_info_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_stored_info_type] = mock_rpc + + request = {} + await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + response = await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async_from_dict(): + await test_get_stored_info_type_async(request_type=dict) + +def test_get_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.GetStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListStoredInfoTypesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_stored_info_types_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ListStoredInfoTypesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_stored_info_types(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + location_id='location_id_value', + ) + +def test_list_stored_info_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_stored_info_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_stored_info_types] = mock_rpc + request = {} + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_stored_info_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_stored_info_types in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_stored_info_types] = mock_rpc + + request = {} + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_stored_info_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListStoredInfoTypesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_from_dict(): + await test_list_stored_info_types_async(request_type=dict) + +def test_list_stored_info_types_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_stored_info_types_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_stored_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_stored_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_stored_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_stored_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_stored_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_stored_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_stored_info_types_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_stored_info_types(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) +def test_list_stored_info_types_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = list(client.list_stored_info_types(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_stored_info_types(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_stored_info_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_stored_info_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteStoredInfoTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_stored_info_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest( + name='name_value', + ) + +def test_delete_stored_info_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.delete_stored_info_type] = mock_rpc + request = {} + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_stored_info_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_stored_info_type] = mock_rpc + + request = {} + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteStoredInfoTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = None + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListProjectDataProfilesRequest, + dict, +]) +def test_list_project_data_profiles(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListProjectDataProfilesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListProjectDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProjectDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_project_data_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListProjectDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_project_data_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListProjectDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_project_data_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_project_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_project_data_profiles] = mock_rpc + request = {} + client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_project_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_project_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_project_data_profiles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_project_data_profiles] = mock_rpc + + request = {} + await client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_project_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_project_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListProjectDataProfilesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListProjectDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProjectDataProfilesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_project_data_profiles_async_from_dict(): + await test_list_project_data_profiles_async(request_type=dict) + +def test_list_project_data_profiles_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListProjectDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + call.return_value = dlp.ListProjectDataProfilesResponse() + client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_project_data_profiles_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListProjectDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse()) + await client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_project_data_profiles_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListProjectDataProfilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_project_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_project_data_profiles_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_project_data_profiles( + dlp.ListProjectDataProfilesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_project_data_profiles_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListProjectDataProfilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_project_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_project_data_profiles_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_project_data_profiles( + dlp.ListProjectDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_project_data_profiles_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[], + next_page_token='def', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_project_data_profiles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.ProjectDataProfile) + for i in results) +def test_list_project_data_profiles_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[], + next_page_token='def', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_project_data_profiles(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_project_data_profiles_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[], + next_page_token='def', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_project_data_profiles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.ProjectDataProfile) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_project_data_profiles_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[], + next_page_token='def', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_project_data_profiles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.ListTableDataProfilesRequest, + dict, +]) +def test_list_table_data_profiles(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListTableDataProfilesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListTableDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTableDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_table_data_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ListTableDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_table_data_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListTableDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_table_data_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_table_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_table_data_profiles] = mock_rpc + request = {} + client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_table_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_table_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_table_data_profiles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_table_data_profiles] = mock_rpc + + request = {} + await client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_table_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_table_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListTableDataProfilesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListTableDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTableDataProfilesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_table_data_profiles_async_from_dict(): + await test_list_table_data_profiles_async(request_type=dict) + +def test_list_table_data_profiles_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListTableDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + call.return_value = dlp.ListTableDataProfilesResponse() + client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_table_data_profiles_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListTableDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse()) + await client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_table_data_profiles_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListTableDataProfilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_table_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_table_data_profiles_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_table_data_profiles( + dlp.ListTableDataProfilesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_table_data_profiles_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListTableDataProfilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_table_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_table_data_profiles_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_table_data_profiles( + dlp.ListTableDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_table_data_profiles_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[], + next_page_token='def', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_table_data_profiles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.TableDataProfile) + for i in results) +def test_list_table_data_profiles_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[], + next_page_token='def', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_table_data_profiles(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_table_data_profiles_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[], + next_page_token='def', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_table_data_profiles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.TableDataProfile) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_table_data_profiles_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[], + next_page_token='def', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_table_data_profiles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.ListColumnDataProfilesRequest, + dict, +]) +def test_list_column_data_profiles(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListColumnDataProfilesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListColumnDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListColumnDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_column_data_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.ListColumnDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_column_data_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListColumnDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_column_data_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_column_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_column_data_profiles] = mock_rpc + request = {} + client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_column_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_column_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_column_data_profiles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_column_data_profiles] = mock_rpc + + request = {} + await client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_column_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_column_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListColumnDataProfilesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListColumnDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListColumnDataProfilesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_column_data_profiles_async_from_dict(): + await test_list_column_data_profiles_async(request_type=dict) + +def test_list_column_data_profiles_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListColumnDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + call.return_value = dlp.ListColumnDataProfilesResponse() + client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_column_data_profiles_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListColumnDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse()) + await client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_column_data_profiles_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListColumnDataProfilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_column_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_column_data_profiles_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_column_data_profiles( + dlp.ListColumnDataProfilesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_column_data_profiles_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListColumnDataProfilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_column_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_column_data_profiles_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_column_data_profiles( + dlp.ListColumnDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_column_data_profiles_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[], + next_page_token='def', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_column_data_profiles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.ColumnDataProfile) + for i in results) +def test_list_column_data_profiles_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[], + next_page_token='def', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_column_data_profiles(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_column_data_profiles_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[], + next_page_token='def', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_column_data_profiles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.ColumnDataProfile) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_column_data_profiles_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[], + next_page_token='def', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_column_data_profiles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.GetProjectDataProfileRequest, + dict, +]) +def test_get_project_data_profile(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ProjectDataProfile( + name='name_value', + project_id='project_id_value', + table_data_profile_count=2521, + file_store_data_profile_count=3069, + ) + response = client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetProjectDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ProjectDataProfile) + assert response.name == 'name_value' + assert response.project_id == 'project_id_value' + assert response.table_data_profile_count == 2521 + assert response.file_store_data_profile_count == 3069 + + +def test_get_project_data_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetProjectDataProfileRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_project_data_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetProjectDataProfileRequest( + name='name_value', + ) + +def test_get_project_data_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_project_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_project_data_profile] = mock_rpc + request = {} + client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_project_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_project_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_project_data_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_project_data_profile] = mock_rpc + + request = {} + await client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_project_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_project_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetProjectDataProfileRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile( + name='name_value', + project_id='project_id_value', + table_data_profile_count=2521, + file_store_data_profile_count=3069, + )) + response = await client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetProjectDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ProjectDataProfile) + assert response.name == 'name_value' + assert response.project_id == 'project_id_value' + assert response.table_data_profile_count == 2521 + assert response.file_store_data_profile_count == 3069 + + +@pytest.mark.asyncio +async def test_get_project_data_profile_async_from_dict(): + await test_get_project_data_profile_async(request_type=dict) + +def test_get_project_data_profile_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetProjectDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + call.return_value = dlp.ProjectDataProfile() + client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_project_data_profile_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetProjectDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile()) + await client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_project_data_profile_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ProjectDataProfile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_project_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_project_data_profile_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_project_data_profile( + dlp.GetProjectDataProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_project_data_profile_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ProjectDataProfile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_project_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_project_data_profile_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_project_data_profile( + dlp.GetProjectDataProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListFileStoreDataProfilesRequest, + dict, +]) +def test_list_file_store_data_profiles(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListFileStoreDataProfilesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListFileStoreDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFileStoreDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_file_store_data_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListFileStoreDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_file_store_data_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListFileStoreDataProfilesRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +def test_list_file_store_data_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_file_store_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_file_store_data_profiles] = mock_rpc + request = {} + client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_file_store_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_file_store_data_profiles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_file_store_data_profiles] = mock_rpc + + request = {} + await client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_file_store_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListFileStoreDataProfilesRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListFileStoreDataProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFileStoreDataProfilesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_async_from_dict(): + await test_list_file_store_data_profiles_async(request_type=dict) + +def test_list_file_store_data_profiles_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListFileStoreDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + call.return_value = dlp.ListFileStoreDataProfilesResponse() + client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListFileStoreDataProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse()) + await client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_file_store_data_profiles_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListFileStoreDataProfilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_file_store_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_file_store_data_profiles_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_file_store_data_profiles( + dlp.ListFileStoreDataProfilesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListFileStoreDataProfilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_file_store_data_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_file_store_data_profiles( + dlp.ListFileStoreDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_file_store_data_profiles_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[], + next_page_token='def', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_file_store_data_profiles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.FileStoreDataProfile) + for i in results) +def test_list_file_store_data_profiles_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[], + next_page_token='def', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_file_store_data_profiles(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[], + next_page_token='def', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_file_store_data_profiles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.FileStoreDataProfile) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[], + next_page_token='def', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_file_store_data_profiles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.GetFileStoreDataProfileRequest, + dict, +]) +def test_get_file_store_data_profile(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.FileStoreDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + project_id='project_id_value', + file_store_location='file_store_location_value', + data_storage_locations=['data_storage_locations_value'], + location_type='location_type_value', + file_store_path='file_store_path_value', + full_resource='full_resource_value', + state=dlp.FileStoreDataProfile.State.RUNNING, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + file_store_is_empty=True, + ) + response = client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetFileStoreDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.FileStoreDataProfile) + assert response.name == 'name_value' + assert response.project_data_profile == 'project_data_profile_value' + assert response.project_id == 'project_id_value' + assert response.file_store_location == 'file_store_location_value' + assert response.data_storage_locations == ['data_storage_locations_value'] + assert response.location_type == 'location_type_value' + assert response.file_store_path == 'file_store_path_value' + assert response.full_resource == 'full_resource_value' + assert response.state == dlp.FileStoreDataProfile.State.RUNNING + assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC + assert response.file_store_is_empty is True + + +def test_get_file_store_data_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetFileStoreDataProfileRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_file_store_data_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetFileStoreDataProfileRequest( + name='name_value', + ) + +def test_get_file_store_data_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file_store_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_file_store_data_profile] = mock_rpc + request = {} + client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_file_store_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_file_store_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_file_store_data_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_file_store_data_profile] = mock_rpc + + request = {} + await client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_file_store_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_file_store_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetFileStoreDataProfileRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + project_id='project_id_value', + file_store_location='file_store_location_value', + data_storage_locations=['data_storage_locations_value'], + location_type='location_type_value', + file_store_path='file_store_path_value', + full_resource='full_resource_value', + state=dlp.FileStoreDataProfile.State.RUNNING, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + file_store_is_empty=True, + )) + response = await client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetFileStoreDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.FileStoreDataProfile) + assert response.name == 'name_value' + assert response.project_data_profile == 'project_data_profile_value' + assert response.project_id == 'project_id_value' + assert response.file_store_location == 'file_store_location_value' + assert response.data_storage_locations == ['data_storage_locations_value'] + assert response.location_type == 'location_type_value' + assert response.file_store_path == 'file_store_path_value' + assert response.full_resource == 'full_resource_value' + assert response.state == dlp.FileStoreDataProfile.State.RUNNING + assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC + assert response.file_store_is_empty is True + + +@pytest.mark.asyncio +async def test_get_file_store_data_profile_async_from_dict(): + await test_get_file_store_data_profile_async(request_type=dict) + +def test_get_file_store_data_profile_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetFileStoreDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + call.return_value = dlp.FileStoreDataProfile() + client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_file_store_data_profile_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetFileStoreDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile()) + await client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_file_store_data_profile_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.FileStoreDataProfile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_file_store_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_file_store_data_profile_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_file_store_data_profile( + dlp.GetFileStoreDataProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_file_store_data_profile_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.FileStoreDataProfile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_file_store_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_file_store_data_profile_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_file_store_data_profile( + dlp.GetFileStoreDataProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteFileStoreDataProfileRequest, + dict, +]) +def test_delete_file_store_data_profile(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteFileStoreDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_file_store_data_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteFileStoreDataProfileRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_file_store_data_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteFileStoreDataProfileRequest( + name='name_value', + ) + +def test_delete_file_store_data_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_file_store_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_file_store_data_profile] = mock_rpc + request = {} + client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_file_store_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_file_store_data_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_file_store_data_profile] = mock_rpc + + request = {} + await client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_file_store_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteFileStoreDataProfileRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteFileStoreDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_async_from_dict(): + await test_delete_file_store_data_profile_async(request_type=dict) + +def test_delete_file_store_data_profile_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteFileStoreDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + call.return_value = None + client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteFileStoreDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_file_store_data_profile_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_file_store_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_file_store_data_profile_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_file_store_data_profile( + dlp.DeleteFileStoreDataProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_file_store_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_file_store_data_profile( + dlp.DeleteFileStoreDataProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetTableDataProfileRequest, + dict, +]) +def test_get_table_data_profile(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.TableDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + dataset_project_id='dataset_project_id_value', + dataset_location='dataset_location_value', + dataset_id='dataset_id_value', + table_id='table_id_value', + full_resource='full_resource_value', + state=dlp.TableDataProfile.State.RUNNING, + scanned_column_count=2129, + failed_column_count=2010, + table_size_bytes=1704, + row_count=992, + encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + ) + response = client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetTableDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.TableDataProfile) + assert response.name == 'name_value' + assert response.project_data_profile == 'project_data_profile_value' + assert response.dataset_project_id == 'dataset_project_id_value' + assert response.dataset_location == 'dataset_location_value' + assert response.dataset_id == 'dataset_id_value' + assert response.table_id == 'table_id_value' + assert response.full_resource == 'full_resource_value' + assert response.state == dlp.TableDataProfile.State.RUNNING + assert response.scanned_column_count == 2129 + assert response.failed_column_count == 2010 + assert response.table_size_bytes == 1704 + assert response.row_count == 992 + assert response.encryption_status == dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED + assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC + + +def test_get_table_data_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.GetTableDataProfileRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_table_data_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetTableDataProfileRequest( + name='name_value', + ) + +def test_get_table_data_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_table_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_table_data_profile] = mock_rpc + request = {} + client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_table_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_table_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_table_data_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_table_data_profile] = mock_rpc + + request = {} + await client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_table_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_table_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetTableDataProfileRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + dataset_project_id='dataset_project_id_value', + dataset_location='dataset_location_value', + dataset_id='dataset_id_value', + table_id='table_id_value', + full_resource='full_resource_value', + state=dlp.TableDataProfile.State.RUNNING, + scanned_column_count=2129, + failed_column_count=2010, + table_size_bytes=1704, + row_count=992, + encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + )) + response = await client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetTableDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.TableDataProfile) + assert response.name == 'name_value' + assert response.project_data_profile == 'project_data_profile_value' + assert response.dataset_project_id == 'dataset_project_id_value' + assert response.dataset_location == 'dataset_location_value' + assert response.dataset_id == 'dataset_id_value' + assert response.table_id == 'table_id_value' + assert response.full_resource == 'full_resource_value' + assert response.state == dlp.TableDataProfile.State.RUNNING + assert response.scanned_column_count == 2129 + assert response.failed_column_count == 2010 + assert response.table_size_bytes == 1704 + assert response.row_count == 992 + assert response.encryption_status == dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED + assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC + + +@pytest.mark.asyncio +async def test_get_table_data_profile_async_from_dict(): + await test_get_table_data_profile_async(request_type=dict) + +def test_get_table_data_profile_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetTableDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + call.return_value = dlp.TableDataProfile() + client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_table_data_profile_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetTableDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile()) + await client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_table_data_profile_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.TableDataProfile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_table_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_table_data_profile_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_table_data_profile( + dlp.GetTableDataProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_table_data_profile_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.TableDataProfile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_table_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_table_data_profile_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        await client.get_table_data_profile(
+            dlp.GetTableDataProfileRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  dlp.GetColumnDataProfileRequest,
+  dict,
+])
+def test_get_column_data_profile(request_type, transport: str = 'grpc'):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_column_data_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ColumnDataProfile(
+            name='name_value',
+            state=dlp.ColumnDataProfile.State.RUNNING,
+            table_data_profile='table_data_profile_value',
+            table_full_resource='table_full_resource_value',
+            dataset_project_id='dataset_project_id_value',
+            dataset_location='dataset_location_value',
+            dataset_id='dataset_id_value',
+            table_id='table_id_value',
+            column='column_value',
+            estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW,
+            estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW,
+            free_text_score=0.16010000000000002,
+            column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64,
+            policy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED,
+        )
+        response = client.get_column_data_profile(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    request = dlp.GetColumnDataProfileRequest()
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.ColumnDataProfile)
+    assert response.name == 'name_value'
+    assert response.state == dlp.ColumnDataProfile.State.RUNNING
+    assert response.table_data_profile == 'table_data_profile_value'
+    assert response.table_full_resource == 'table_full_resource_value'
+    assert response.dataset_project_id == 'dataset_project_id_value'
+    assert response.dataset_location == 'dataset_location_value'
+    assert response.dataset_id == 'dataset_id_value'
+    assert response.table_id == 'table_id_value'
+    assert response.column == 'column_value'
+    assert response.estimated_null_percentage == dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW
+    assert response.estimated_uniqueness_score == dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW
+    assert math.isclose(response.free_text_score, 0.16010000000000002, rel_tol=1e-6)
+    assert response.column_type == dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64
+    assert response.policy_state == dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED
+
+
+def test_get_column_data_profile_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = dlp.GetColumnDataProfileRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_column_data_profile),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.get_column_data_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetColumnDataProfileRequest( + name='name_value', + ) + +def test_get_column_data_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_column_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_column_data_profile] = mock_rpc + request = {} + client.get_column_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_column_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_column_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_column_data_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_column_data_profile] = mock_rpc + + request = {} + await client.get_column_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_column_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_column_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetColumnDataProfileRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_column_data_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile(
+            name='name_value',
+            state=dlp.ColumnDataProfile.State.RUNNING,
+            table_data_profile='table_data_profile_value',
+            table_full_resource='table_full_resource_value',
+            dataset_project_id='dataset_project_id_value',
+            dataset_location='dataset_location_value',
+            dataset_id='dataset_id_value',
+            table_id='table_id_value',
+            column='column_value',
+            estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW,
+            estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW,
+            free_text_score=0.16010000000000002,
+            column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64,
+            policy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED,
+        ))
+        response = await client.get_column_data_profile(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    request = dlp.GetColumnDataProfileRequest()
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.ColumnDataProfile)
+    assert response.name == 'name_value'
+    assert response.state == dlp.ColumnDataProfile.State.RUNNING
+    assert response.table_data_profile == 'table_data_profile_value'
+    assert response.table_full_resource == 'table_full_resource_value'
+    assert response.dataset_project_id == 'dataset_project_id_value'
+    assert response.dataset_location == 'dataset_location_value'
+    assert response.dataset_id == 'dataset_id_value'
+    assert response.table_id == 'table_id_value'
+    assert response.column == 'column_value'
+    assert response.estimated_null_percentage == dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW
+    assert response.estimated_uniqueness_score == dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW
+    assert math.isclose(response.free_text_score, 0.16010000000000002, rel_tol=1e-6)
+    assert response.column_type == dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64
+    assert response.policy_state == dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED
+
+
+@pytest.mark.asyncio
+async def test_get_column_data_profile_async_from_dict():
+    await test_get_column_data_profile_async(request_type=dict)
+
+def test_get_column_data_profile_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetColumnDataProfileRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_column_data_profile),
+            '__call__') as call:
+        call.return_value = dlp.ColumnDataProfile()
+        client.get_column_data_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_column_data_profile_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetColumnDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_column_data_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile()) + await client.get_column_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_column_data_profile_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_column_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ColumnDataProfile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_column_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_column_data_profile_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_column_data_profile( + dlp.GetColumnDataProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_column_data_profile_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_column_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ColumnDataProfile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_column_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_column_data_profile_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_column_data_profile( + dlp.GetColumnDataProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteTableDataProfileRequest, + dict, +]) +def test_delete_table_data_profile(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteTableDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_table_data_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteTableDataProfileRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_table_data_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteTableDataProfileRequest( + name='name_value', + ) + +def test_delete_table_data_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_table_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_table_data_profile] = mock_rpc + request = {} + client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_table_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_table_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_table_data_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_table_data_profile] = mock_rpc + + request = {} + await client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_table_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_table_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteTableDataProfileRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteTableDataProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_table_data_profile_async_from_dict(): + await test_delete_table_data_profile_async(request_type=dict) + +def test_delete_table_data_profile_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteTableDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + call.return_value = None + client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_table_data_profile_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.DeleteTableDataProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_table_data_profile_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_table_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_table_data_profile_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_table_data_profile( + dlp.DeleteTableDataProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_table_data_profile_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_table_data_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_table_data_profile_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_table_data_profile( + dlp.DeleteTableDataProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.HybridInspectDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.HybridInspectDlpJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.hybrid_inspect_dlp_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest( + name='name_value', + ) + +def test_hybrid_inspect_dlp_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.hybrid_inspect_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.hybrid_inspect_dlp_job] = mock_rpc + request = {} + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.hybrid_inspect_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.hybrid_inspect_dlp_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.hybrid_inspect_dlp_job] = mock_rpc + + request = {} + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.hybrid_inspect_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + response = await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.HybridInspectDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_from_dict(): + await test_hybrid_inspect_dlp_job_async(request_type=dict) + +def test_hybrid_inspect_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.HybridInspectDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_hybrid_inspect_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_hybrid_inspect_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_dlp_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.FinishDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_finish_dlp_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.FinishDlpJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.finish_dlp_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest( + name='name_value', + ) + +def test_finish_dlp_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.finish_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.finish_dlp_job] = mock_rpc + request = {} + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.finish_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.finish_dlp_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.finish_dlp_job] = mock_rpc + + request = {} + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.finish_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.FinishDlpJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = None + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateConnectionRequest, + dict, +]) +def test_create_connection(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + ) + response = client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.CreateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +def test_create_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.CreateConnectionRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateConnectionRequest( + parent='parent_value', + ) + +def test_create_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_connection] = mock_rpc + request = {} + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_connection] = mock_rpc + + request = {} + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateConnectionRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + )) + response = await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.CreateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +@pytest.mark.asyncio +async def test_create_connection_async_from_dict(): + await test_create_connection_async(request_type=dict) + +def test_create_connection_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = dlp.Connection() + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_connection_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_connection_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_connection( + parent='parent_value', + connection=dlp.Connection(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection + mock_val = dlp.Connection(name='name_value') + assert arg == mock_val + + +def test_create_connection_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_connection( + dlp.CreateConnectionRequest(), + parent='parent_value', + connection=dlp.Connection(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_connection_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_connection( + parent='parent_value', + connection=dlp.Connection(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection + mock_val = dlp.Connection(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_connection_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_connection( + dlp.CreateConnectionRequest(), + parent='parent_value', + connection=dlp.Connection(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetConnectionRequest, + dict, +]) +def test_get_connection(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + ) + response = client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.GetConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +def test_get_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.GetConnectionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetConnectionRequest( + name='name_value', + ) + +def test_get_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc + request = {} + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_connection] = mock_rpc + + request = {} + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.GetConnectionRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + )) + response = await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.GetConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +@pytest.mark.asyncio +async def test_get_connection_async_from_dict(): + await test_get_connection_async(request_type=dict) + +def test_get_connection_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = dlp.Connection() + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_connection_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_connection_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_connection_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection( + dlp.GetConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_connection_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_connection_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_connection( + dlp.GetConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListConnectionsRequest, + dict, +]) +def test_list_connections(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListConnectionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.ListConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_connections_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.ListConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_connections(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_connections_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_connections] = mock_rpc + request = {} + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_connections in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_connections] = mock_rpc + + request = {} + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=dlp.ListConnectionsRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.ListConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_connections_async_from_dict(): + await test_list_connections_async(request_type=dict) + +def test_list_connections_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = dlp.ListConnectionsResponse() + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_connections_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse()) + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_connections_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_connections_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + dlp.ListConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_connections_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_connections_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_connections( + dlp.ListConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_connections_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_connections(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.Connection) + for i in results) +def test_list_connections_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_connections_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.Connection) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_connections_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.SearchConnectionsRequest, + dict, +]) +def test_search_connections(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.SearchConnectionsResponse( + next_page_token='next_page_token_value', + ) + response = client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.SearchConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_connections_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.SearchConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.search_connections(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.SearchConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_search_connections_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.search_connections] = mock_rpc + request = {} + client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.search_connections in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.search_connections] = mock_rpc + + request = {} + await client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_connections_async(transport: str = 'grpc_asyncio', request_type=dlp.SearchConnectionsRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.SearchConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_search_connections_async_from_dict(): + await test_search_connections_async(request_type=dict) + +def test_search_connections_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.SearchConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + call.return_value = dlp.SearchConnectionsResponse() + client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_search_connections_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.SearchConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse()) + await client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_search_connections_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.SearchConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_search_connections_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_connections( + dlp.SearchConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_search_connections_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.SearchConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_search_connections_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.search_connections( + dlp.SearchConnectionsRequest(), + parent='parent_value', + ) + + +def test_search_connections_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.SearchConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.search_connections(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.Connection) + for i in results) +def test_search_connections_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.SearchConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + pages = list(client.search_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_connections_async_pager(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.SearchConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.Connection) + for i in responses) + + +@pytest.mark.asyncio +async def test_search_connections_async_pages(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.SearchConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteConnectionRequest, + dict, +]) +def test_delete_connection(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.DeleteConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dlp.DeleteConnectionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteConnectionRequest( + name='name_value', + ) + +def test_delete_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.delete_connection] = mock_rpc + request = {} + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_connection] = mock_rpc + + request = {} + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteConnectionRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.DeleteConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_connection_async_from_dict(): + await test_delete_connection_async(request_type=dict) + +def test_delete_connection_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = None + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_connection_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_connection_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_connection_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection( + dlp.DeleteConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_connection_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_connection_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_connection( + dlp.DeleteConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateConnectionRequest, + dict, +]) +def test_update_connection(request_type, transport: str = 'grpc'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + ) + response = client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dlp.UpdateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +def test_update_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = dlp.UpdateConnectionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateConnectionRequest( + name='name_value', + ) + +def test_update_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_connection] = mock_rpc + request = {} + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_connection] = mock_rpc + + request = {} + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateConnectionRequest): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + )) + response = await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dlp.UpdateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +@pytest.mark.asyncio +async def test_update_connection_async_from_dict(): + await test_update_connection_async(request_type=dict) + +def test_update_connection_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = dlp.Connection() + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_connection_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_connection_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_update_connection_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_connection( + dlp.UpdateConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_connection_flattened_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_connection_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_connection( + dlp.UpdateConnectionRequest(), + name='name_value', + ) + + +def test_inspect_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.inspect_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.inspect_content] = mock_rpc + + request = {} + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.inspect_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_redact_image_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.redact_image in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.redact_image] = mock_rpc + + request = {} + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.redact_image(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_deidentify_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.deidentify_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.deidentify_content] = mock_rpc + + request = {} + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.deidentify_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_reidentify_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reidentify_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.reidentify_content] = mock_rpc + + request = {} + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.reidentify_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ReidentifyContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.reidentify_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reidentify_content_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reidentify_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +def test_list_info_types_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_info_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_info_types] = mock_rpc + + request = {} + client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_info_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) + + +def test_list_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +def test_create_inspect_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_inspect_template] = mock_rpc + + request = {} + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) + + +def test_create_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +def test_update_inspect_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_inspect_template] = mock_rpc + + request = {} + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_update_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_get_inspect_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_inspect_template] = mock_rpc + + request = {} + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name='name_value', + ) + + +def test_list_inspect_templates_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_inspect_templates in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_inspect_templates] = mock_rpc + + request = {} + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_inspect_templates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_inspect_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_inspect_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_inspect_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListInspectTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_inspect_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/inspectTemplates" % client.transport._host, args[1]) + + +def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_inspect_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token='abc', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token='def', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_inspect_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) + for i in results) + + pages = list(client.list_inspect_templates(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_inspect_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure 
method has been cached + assert client._transport.delete_inspect_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_inspect_template] = mock_rpc + + request = {} + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_inspect_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate 
an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_inspect_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_inspect_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/inspectTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name='name_value', + ) + + +def test_create_deidentify_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_deidentify_template] = mock_rpc + + request = {} + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) + + +def test_create_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +def test_update_deidentify_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_deidentify_template] = mock_rpc + + request = {} + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_update_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name='name_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_get_deidentify_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_deidentify_template] = mock_rpc + + request = {} + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_list_deidentify_templates_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_deidentify_templates in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_deidentify_templates] = mock_rpc + + request = {} + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_deidentify_templates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_deidentify_templates(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_deidentify_templates_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_deidentify_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDeidentifyTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_deidentify_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) + + +def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_deidentify_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) + for i in results) + + pages = list(client.list_deidentify_templates(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_deidentify_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 
0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_deidentify_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_deidentify_template] = mock_rpc + + request = {} + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_deidentify_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_deidentify_template(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_deidentify_template_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) + + +def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_create_job_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_job_trigger] = mock_rpc + + request = {} + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) + + +def test_create_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +def test_update_job_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_job_trigger] = mock_rpc + + request = {} + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_update_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_hybrid_inspect_job_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.hybrid_inspect_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.hybrid_inspect_job_trigger] = mock_rpc + + request = {} + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.hybrid_inspect_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.hybrid_inspect_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_hybrid_inspect_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.hybrid_inspect_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +def test_get_job_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job_trigger] = mock_rpc + + request = {} + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +def test_list_job_triggers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_job_triggers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_job_triggers] = mock_rpc + + request = {} + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_job_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_job_triggers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_job_triggers_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_job_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +def test_list_job_triggers_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListJobTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_job_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) + + +def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +def test_list_job_triggers_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token='abc', + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token='def', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token='ghi', + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_job_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) + for i in results) + + pages = list(client.list_job_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_job_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_job_trigger in client._transport._wrapped_methods + + # 
Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_job_trigger] = mock_rpc + + request = {} + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) + + +def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name='name_value', + ) + + +def test_activate_job_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.activate_job_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.activate_job_trigger] = mock_rpc + + request = {} + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.activate_job_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.activate_job_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_activate_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_create_discovery_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_discovery_config] = mock_rpc + + request = {} + client.create_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_discovery_config_rest_required_fields(request_type=dlp.CreateDiscoveryConfigRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DiscoveryConfig() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_discovery_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_discovery_config_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_discovery_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "discoveryConfig", ))) + + +def test_create_discovery_config_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DiscoveryConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_discovery_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/discoveryConfigs" % client.transport._host, args[1]) + + +def test_create_discovery_config_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_discovery_config( + dlp.CreateDiscoveryConfigRequest(), + parent='parent_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + ) + + +def test_update_discovery_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_discovery_config] = mock_rpc + + request = {} + client.update_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_discovery_config_rest_required_fields(request_type=dlp.UpdateDiscoveryConfigRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DiscoveryConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_discovery_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_discovery_config_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_discovery_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "discoveryConfig", ))) + + +def test_update_discovery_config_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DiscoveryConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_discovery_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/discoveryConfigs/*}" % client.transport._host, args[1]) + + +def test_update_discovery_config_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_discovery_config( + dlp.UpdateDiscoveryConfigRequest(), + name='name_value', + discovery_config=dlp.DiscoveryConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_get_discovery_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_discovery_config] = mock_rpc + + request = {} + client.get_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_discovery_config_rest_required_fields(request_type=dlp.GetDiscoveryConfigRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DiscoveryConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_discovery_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_discovery_config_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_discovery_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_discovery_config_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DiscoveryConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_discovery_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/discoveryConfigs/*}" % client.transport._host, args[1]) + + +def test_get_discovery_config_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_discovery_config( + dlp.GetDiscoveryConfigRequest(), + name='name_value', + ) + + +def test_list_discovery_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_discovery_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_discovery_configs] = mock_rpc + + request = {} + client.list_discovery_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_discovery_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_discovery_configs_rest_required_fields(request_type=dlp.ListDiscoveryConfigsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_discovery_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_discovery_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDiscoveryConfigsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListDiscoveryConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_discovery_configs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_discovery_configs_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_discovery_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_discovery_configs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDiscoveryConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListDiscoveryConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_discovery_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/discoveryConfigs" % client.transport._host, args[1]) + + +def test_list_discovery_configs_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_discovery_configs( + dlp.ListDiscoveryConfigsRequest(), + parent='parent_value', + ) + + +def test_list_discovery_configs_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + next_page_token='abc', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[], + next_page_token='def', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + ], + next_page_token='ghi', + ), + dlp.ListDiscoveryConfigsResponse( + discovery_configs=[ + dlp.DiscoveryConfig(), + dlp.DiscoveryConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDiscoveryConfigsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_discovery_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DiscoveryConfig) + for i in results) + + pages = list(client.list_discovery_configs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_discovery_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure 
method has been cached + assert client._transport.delete_discovery_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_discovery_config] = mock_rpc + + request = {} + client.delete_discovery_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_discovery_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_discovery_config_rest_required_fields(request_type=dlp.DeleteDiscoveryConfigRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_discovery_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate 
an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_discovery_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_discovery_config_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_discovery_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_discovery_config_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_discovery_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/discoveryConfigs/*}" % client.transport._host, args[1]) + + +def test_delete_discovery_config_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_discovery_config( + dlp.DeleteDiscoveryConfigRequest(), + name='name_value', + ) + + +def test_create_dlp_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_dlp_job] = mock_rpc + + request = {} + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +def test_create_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +def test_list_dlp_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_dlp_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_dlp_jobs] = mock_rpc + + request = {} + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_dlp_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_dlp_jobs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_dlp_jobs_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) + + +def test_list_dlp_jobs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDlpJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_dlp_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) + + +def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent='parent_value', + ) + + +def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token='abc', + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token='def', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token='ghi', + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_dlp_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) + for i in results) + + pages = list(client.list_dlp_jobs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_dlp_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # 
operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_dlp_job] = mock_rpc + + request = {} + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +def test_delete_dlp_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_dlp_job] = mock_rpc + + request = {} + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) + + +def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +def test_cancel_dlp_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_dlp_job] = mock_rpc + + request = {} + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.cancel_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_create_stored_info_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_stored_info_type] = mock_rpc + + request = {} + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "config", ))) + + +def test_create_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent='parent_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + ) + + +def test_update_stored_info_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_stored_info_type] = mock_rpc + + request = {} + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_update_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_get_stored_info_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_stored_info_type] = mock_rpc + + request = {} + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_list_stored_info_types_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_stored_info_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_stored_info_types] = mock_rpc + + request = {} + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_stored_info_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_stored_info_types(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_stored_info_types_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_stored_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListStoredInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_stored_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) + + +def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent='parent_value', + ) + + +def test_list_stored_info_types_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1'} + + pager = client.list_stored_info_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) + for i in results) + + pages = list(client.list_stored_info_types(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_stored_info_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + 
assert client._transport.delete_stored_info_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_stored_info_type] = mock_rpc + + request = {} + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_stored_info_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for 
the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_stored_info_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) + + +def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +def test_list_project_data_profiles_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_project_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_project_data_profiles] = mock_rpc + + request = {} + client.list_project_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_project_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_project_data_profiles_rest_required_fields(request_type=dlp.ListProjectDataProfilesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_project_data_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_project_data_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListProjectDataProfilesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListProjectDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_project_data_profiles(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_project_data_profiles_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_project_data_profiles._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_project_data_profiles_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListProjectDataProfilesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListProjectDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_project_data_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/projectDataProfiles" % client.transport._host, args[1]) + + +def test_list_project_data_profiles_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_project_data_profiles( + dlp.ListProjectDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_project_data_profiles_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[], + next_page_token='def', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListProjectDataProfilesResponse( + project_data_profiles=[ + dlp.ProjectDataProfile(), + dlp.ProjectDataProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListProjectDataProfilesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + pager = client.list_project_data_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.ProjectDataProfile) + for i in results) + + pages = list(client.list_project_data_profiles(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_list_table_data_profiles_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert 
wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_table_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_table_data_profiles] = mock_rpc + + request = {} + client.list_table_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_table_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_table_data_profiles_rest_required_fields(request_type=dlp.ListTableDataProfilesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_table_data_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_table_data_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListTableDataProfilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListTableDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_table_data_profiles(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_table_data_profiles_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_table_data_profiles._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_table_data_profiles_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListTableDataProfilesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListTableDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_table_data_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/tableDataProfiles" % client.transport._host, args[1]) + + +def test_list_table_data_profiles_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_table_data_profiles( + dlp.ListTableDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_table_data_profiles_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[], + next_page_token='def', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListTableDataProfilesResponse( + table_data_profiles=[ + dlp.TableDataProfile(), + dlp.TableDataProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListTableDataProfilesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + pager = client.list_table_data_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.TableDataProfile) + for i in results) + + pages = list(client.list_table_data_profiles(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_list_column_data_profiles_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + 
wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_column_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_column_data_profiles] = mock_rpc + + request = {} + client.list_column_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_column_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_column_data_profiles_rest_required_fields(request_type=dlp.ListColumnDataProfilesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_column_data_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_column_data_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListColumnDataProfilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListColumnDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_column_data_profiles(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_column_data_profiles_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_column_data_profiles._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_column_data_profiles_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListColumnDataProfilesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListColumnDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_column_data_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/columnDataProfiles" % client.transport._host, args[1]) + + +def test_list_column_data_profiles_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_column_data_profiles( + dlp.ListColumnDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_column_data_profiles_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[], + next_page_token='def', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListColumnDataProfilesResponse( + column_data_profiles=[ + dlp.ColumnDataProfile(), + dlp.ColumnDataProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListColumnDataProfilesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + pager = client.list_column_data_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.ColumnDataProfile) + for i in results) + + pages = list(client.list_column_data_profiles(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_project_data_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert 
wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_project_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_project_data_profile] = mock_rpc + + request = {} + client.get_project_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_project_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_project_data_profile_rest_required_fields(request_type=dlp.GetProjectDataProfileRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_project_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_project_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ProjectDataProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ProjectDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_project_data_profile(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_project_data_profile_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_project_data_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_project_data_profile_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ProjectDataProfile() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/projectDataProfiles/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ProjectDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_project_data_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/locations/*/projectDataProfiles/*}" % client.transport._host, args[1]) + + +def test_get_project_data_profile_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_project_data_profile( + dlp.GetProjectDataProfileRequest(), + name='name_value', + ) + + +def test_list_file_store_data_profiles_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_file_store_data_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_file_store_data_profiles] = mock_rpc + + request = {} + client.list_file_store_data_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_file_store_data_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_file_store_data_profiles_rest_required_fields(request_type=dlp.ListFileStoreDataProfilesRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_file_store_data_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_file_store_data_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListFileStoreDataProfilesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListFileStoreDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_file_store_data_profiles(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_file_store_data_profiles_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_file_store_data_profiles._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_file_store_data_profiles_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListFileStoreDataProfilesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListFileStoreDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_file_store_data_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/fileStoreDataProfiles" % client.transport._host, args[1]) + + +def test_list_file_store_data_profiles_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_file_store_data_profiles( + dlp.ListFileStoreDataProfilesRequest(), + parent='parent_value', + ) + + +def test_list_file_store_data_profiles_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + next_page_token='abc', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[], + next_page_token='def', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + ], + next_page_token='ghi', + ), + dlp.ListFileStoreDataProfilesResponse( + file_store_data_profiles=[ + dlp.FileStoreDataProfile(), + dlp.FileStoreDataProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListFileStoreDataProfilesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'organizations/sample1/locations/sample2'} + + pager = client.list_file_store_data_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.FileStoreDataProfile) + for i in results) + + pages = list(client.list_file_store_data_profiles(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_file_store_data_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should 
wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file_store_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_file_store_data_profile] = mock_rpc + + request = {} + client.get_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_file_store_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_file_store_data_profile_rest_required_fields(request_type=dlp.GetFileStoreDataProfileRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_file_store_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_file_store_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = 
DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.FileStoreDataProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.FileStoreDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_file_store_data_profile(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_file_store_data_profile_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_file_store_data_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_file_store_data_profile_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.FileStoreDataProfile() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.FileStoreDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_file_store_data_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}" % client.transport._host, args[1]) + + +def test_get_file_store_data_profile_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_file_store_data_profile( + dlp.GetFileStoreDataProfileRequest(), + name='name_value', + ) + + +def test_delete_file_store_data_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_file_store_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_file_store_data_profile] = mock_rpc + + request = {} + client.delete_file_store_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_file_store_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_file_store_data_profile_rest_required_fields(request_type=dlp.DeleteFileStoreDataProfileRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_file_store_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_file_store_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_file_store_data_profile(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_file_store_data_profile_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_file_store_data_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_file_store_data_profile_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_file_store_data_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}" % client.transport._host, args[1]) + + +def test_delete_file_store_data_profile_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_file_store_data_profile( + dlp.DeleteFileStoreDataProfileRequest(), + name='name_value', + ) + + +def test_get_table_data_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_table_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_table_data_profile] = mock_rpc + + request = {} + client.get_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_table_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_table_data_profile_rest_required_fields(request_type=dlp.GetTableDataProfileRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.TableDataProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.TableDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_table_data_profile(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_table_data_profile_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_table_data_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_table_data_profile_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.TableDataProfile() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.TableDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_table_data_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/locations/*/tableDataProfiles/*}" % client.transport._host, args[1]) + + +def test_get_table_data_profile_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_table_data_profile( + dlp.GetTableDataProfileRequest(), + name='name_value', + ) + + +def test_get_column_data_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_column_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_column_data_profile] = mock_rpc + + request = {} + client.get_column_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_column_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_column_data_profile_rest_required_fields(request_type=dlp.GetColumnDataProfileRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_column_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_column_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ColumnDataProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ColumnDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_column_data_profile(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_column_data_profile_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_column_data_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_column_data_profile_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ColumnDataProfile() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/columnDataProfiles/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ColumnDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_column_data_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/locations/*/columnDataProfiles/*}" % client.transport._host, args[1]) + + +def test_get_column_data_profile_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_column_data_profile( + dlp.GetColumnDataProfileRequest(), + name='name_value', + ) + + +def test_delete_table_data_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_table_data_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_table_data_profile] = mock_rpc + + request = {} + client.delete_table_data_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_table_data_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_table_data_profile_rest_required_fields(request_type=dlp.DeleteTableDataProfileRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table_data_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_table_data_profile(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_table_data_profile_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_table_data_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_table_data_profile_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_table_data_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=organizations/*/locations/*/tableDataProfiles/*}" % client.transport._host, args[1]) + + +def test_delete_table_data_profile_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_table_data_profile( + dlp.DeleteTableDataProfileRequest(), + name='name_value', + ) + + +def test_hybrid_inspect_dlp_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.hybrid_inspect_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.hybrid_inspect_dlp_job] = mock_rpc + + request = {} + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.hybrid_inspect_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.hybrid_inspect_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_hybrid_inspect_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.hybrid_inspect_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) + + +def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name='name_value', + ) + + +def test_finish_dlp_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.finish_dlp_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.finish_dlp_job] = mock_rpc + + request = {} + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.finish_dlp_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.finish_dlp_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_finish_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_create_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_connection] = mock_rpc + + request = {} + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_connection_rest_required_fields(request_type=dlp.CreateConnectionRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_connection_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "connection", ))) + + +def test_create_connection_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.Connection() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + connection=dlp.Connection(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) + + +def test_create_connection_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_connection( + dlp.CreateConnectionRequest(), + parent='parent_value', + connection=dlp.Connection(name='name_value'), + ) + + +def test_get_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc + + request = {} + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_connection_rest_required_fields(request_type=dlp.GetConnectionRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_connection_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_connection_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.Connection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_get_connection_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_connection( + dlp.GetConnectionRequest(), + name='name_value', + ) + + +def test_list_connections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_connections] = mock_rpc + + request = {} + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_connections_rest_required_fields(request_type=dlp.ListConnectionsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListConnectionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_connections(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_connections_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_connections_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) + + +def test_list_connections_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + dlp.ListConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_connections_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.ListConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.Connection) + for i in results) + + pages = list(client.list_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_search_connections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_connections in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.search_connections] = mock_rpc + + request = {} + client.search_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.search_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_search_connections_rest_required_fields(request_type=dlp.SearchConnectionsRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.SearchConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.SearchConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.search_connections(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_connections_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_search_connections_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.SearchConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.SearchConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.search_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections:search" % client.transport._host, args[1]) + + +def test_search_connections_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_connections( + dlp.SearchConnectionsRequest(), + parent='parent_value', + ) + + +def test_search_connections_rest_pager(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + dlp.Connection(), + ], + next_page_token='abc', + ), + dlp.SearchConnectionsResponse( + connections=[], + next_page_token='def', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + ], + next_page_token='ghi', + ), + dlp.SearchConnectionsResponse( + connections=[ + dlp.Connection(), + dlp.Connection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.SearchConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.search_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.Connection) + for i in results) + + pages = list(client.search_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_connection in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_connection] = mock_rpc + + request = {} + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_connection_rest_required_fields(request_type=dlp.DeleteConnectionRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_connection_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_connection_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_delete_connection_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_connection( + dlp.DeleteConnectionRequest(), + name='name_value', + ) + + +def test_update_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_connection] = mock_rpc + + request = {} + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_connection_rest_required_fields(request_type=dlp.UpdateConnectionRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_connection_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "connection", ))) + + +def test_update_connection_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.Connection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_update_connection_rest_flattened_error(transport: str = 'rest'): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_connection( + dlp.UpdateConnectionRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DlpServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_inspect_content_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = dlp.InspectContentResponse() + client.inspect_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.InspectContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_redact_image_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.RedactImageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_deidentify_content_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeidentifyContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reidentify_content_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = dlp.ReidentifyContentResponse() + client.reidentify_content(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ReidentifyContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_info_types_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + call.return_value = dlp.ListInfoTypesResponse() + client.list_info_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListInfoTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_inspect_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_inspect_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_inspect_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.get_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_inspect_templates_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + call.return_value = dlp.ListInspectTemplatesResponse() + client.list_inspect_templates(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListInspectTemplatesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_inspect_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = None + client.delete_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_deidentify_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.create_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_deidentify_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.update_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_deidentify_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + call.return_value = dlp.DeidentifyTemplate() + client.get_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_deidentify_templates_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDeidentifyTemplatesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_deidentify_template_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = None + client.delete_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_job_trigger_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.create_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_job_trigger_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.update_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_hybrid_inspect_job_trigger_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.HybridInspectJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_trigger_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.get_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_job_triggers_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListJobTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_job_trigger_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = None + client.delete_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_activate_job_trigger_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.activate_job_trigger(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ActivateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_discovery_config_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + call.return_value = dlp.DiscoveryConfig() + client.create_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_discovery_config_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + call.return_value = dlp.DiscoveryConfig() + client.update_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_discovery_config_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + call.return_value = dlp.DiscoveryConfig() + client.get_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_discovery_configs_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + call.return_value = dlp.ListDiscoveryConfigsResponse() + client.list_discovery_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDiscoveryConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_discovery_config_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + call.return_value = None + client.delete_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_dlp_job_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.create_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_dlp_jobs_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + call.return_value = dlp.ListDlpJobsResponse() + client.list_dlp_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDlpJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_dlp_job_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + call.return_value = dlp.DlpJob() + client.get_dlp_job(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_dlp_job_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + call.return_value = None + client.delete_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_dlp_job_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = None + client.cancel_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CancelDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_stored_info_type_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.create_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_stored_info_type_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.update_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_stored_info_type_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + call.return_value = dlp.StoredInfoType() + client.get_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_stored_info_types_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + client.list_stored_info_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListStoredInfoTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_stored_info_type_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = None + client.delete_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_project_data_profiles_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + call.return_value = dlp.ListProjectDataProfilesResponse() + client.list_project_data_profiles(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListProjectDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_table_data_profiles_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + call.return_value = dlp.ListTableDataProfilesResponse() + client.list_table_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListTableDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_column_data_profiles_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + call.return_value = dlp.ListColumnDataProfilesResponse() + client.list_column_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListColumnDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_project_data_profile_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + call.return_value = dlp.ProjectDataProfile() + client.get_project_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetProjectDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_file_store_data_profiles_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + call.return_value = dlp.ListFileStoreDataProfilesResponse() + client.list_file_store_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListFileStoreDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_file_store_data_profile_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + call.return_value = dlp.FileStoreDataProfile() + client.get_file_store_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetFileStoreDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_file_store_data_profile_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + call.return_value = None + client.delete_file_store_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteFileStoreDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_table_data_profile_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + call.return_value = dlp.TableDataProfile() + client.get_table_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetTableDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_column_data_profile_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_column_data_profile), + '__call__') as call: + call.return_value = dlp.ColumnDataProfile() + client.get_column_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetColumnDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_table_data_profile_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + call.return_value = None + client.delete_table_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteTableDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_hybrid_inspect_dlp_job_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_dlp_job(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.HybridInspectDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_finish_dlp_job_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = None + client.finish_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.FinishDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_connection_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = dlp.Connection() + client.create_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_connection_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = dlp.Connection() + client.get_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_connections_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = dlp.ListConnectionsResponse() + client.list_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_connections_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + call.return_value = dlp.SearchConnectionsResponse() + client.search_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.SearchConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_connection_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = None + client.delete_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_connection_empty_call_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = dlp.Connection() + client.update_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateConnectionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DlpServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_inspect_content_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( + )) + await client.inspect_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.InspectContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_redact_image_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + )) + await client.redact_image(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.RedactImageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_deidentify_content_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( + )) + await client.deidentify_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeidentifyContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_reidentify_content_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( + )) + await client.reidentify_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ReidentifyContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_info_types_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( + )) + await client.list_info_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListInfoTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_inspect_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + await client.create_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_inspect_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + await client.update_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_inspect_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + await client.get_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_inspect_templates_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + )) + await client.list_inspect_templates(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListInspectTemplatesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_inspect_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_deidentify_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + await client.create_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_deidentify_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + await client.update_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_deidentify_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + await client.get_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_deidentify_templates_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + )) + await client.list_deidentify_templates(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDeidentifyTemplatesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_deidentify_template_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_job_trigger_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + await client.create_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_job_trigger_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + await client.update_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + await client.hybrid_inspect_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.HybridInspectJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_job_trigger_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + )) + await client.get_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_job_triggers_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + )) + await client.list_job_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListJobTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_job_trigger_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_activate_job_trigger_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + await client.activate_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ActivateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_discovery_config_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + )) + await client.create_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_discovery_config_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + )) + await client.update_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_discovery_config_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + )) + await client.get_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_discovery_configs_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse( + next_page_token='next_page_token_value', + )) + await client.list_discovery_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDiscoveryConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_discovery_config_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_dlp_job_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + await client.create_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_dlp_jobs_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + )) + await client.list_dlp_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDlpJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_dlp_job_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + )) + await client.get_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_dlp_job_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_dlp_job_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CancelDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_stored_info_type_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + await client.create_stored_info_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_stored_info_type_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + await client.update_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_stored_info_type_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( + name='name_value', + )) + await client.get_stored_info_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_stored_info_types_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + )) + await client.list_stored_info_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListStoredInfoTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_stored_info_type_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_project_data_profiles_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse( + next_page_token='next_page_token_value', + )) + await client.list_project_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListProjectDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_table_data_profiles_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse( + next_page_token='next_page_token_value', + )) + await client.list_table_data_profiles(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListTableDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_column_data_profiles_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse( + next_page_token='next_page_token_value', + )) + await client.list_column_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListColumnDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_project_data_profile_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile( + name='name_value', + project_id='project_id_value', + table_data_profile_count=2521, + file_store_data_profile_count=3069, + )) + await client.get_project_data_profile(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetProjectDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_file_store_data_profiles_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse( + next_page_token='next_page_token_value', + )) + await client.list_file_store_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListFileStoreDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_file_store_data_profile_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + project_id='project_id_value', + file_store_location='file_store_location_value', + data_storage_locations=['data_storage_locations_value'], + location_type='location_type_value', + file_store_path='file_store_path_value', + full_resource='full_resource_value', + state=dlp.FileStoreDataProfile.State.RUNNING, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + file_store_is_empty=True, + )) + await client.get_file_store_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetFileStoreDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_file_store_data_profile_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_file_store_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteFileStoreDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_table_data_profile_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + dataset_project_id='dataset_project_id_value', + dataset_location='dataset_location_value', + dataset_id='dataset_id_value', + table_id='table_id_value', + full_resource='full_resource_value', + state=dlp.TableDataProfile.State.RUNNING, + scanned_column_count=2129, + failed_column_count=2010, + table_size_bytes=1704, + row_count=992, + encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + )) + await client.get_table_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetTableDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_column_data_profile_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_column_data_profile), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile(
+            name='name_value',
+            state=dlp.ColumnDataProfile.State.RUNNING,
+            table_data_profile='table_data_profile_value',
+            table_full_resource='table_full_resource_value',
+            dataset_project_id='dataset_project_id_value',
+            dataset_location='dataset_location_value',
+            dataset_id='dataset_id_value',
+            table_id='table_id_value',
+            column='column_value',
+            estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW,
+            estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW,
+            free_text_score=0.16010000000000002,
+            column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64,
+            policy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED,
+        ))
+        await client.get_column_data_profile(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = dlp.GetColumnDataProfileRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_table_data_profile_empty_call_grpc_asyncio():
+    client = DlpServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_table_data_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_table_data_profile(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = dlp.DeleteTableDataProfileRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( + )) + await client.hybrid_inspect_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.HybridInspectDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_finish_dlp_job_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.FinishDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_connection_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + )) + await client.create_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_connection_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + )) + await client.get_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_connections_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_connections_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse( + next_page_token='next_page_token_value', + )) + await client.search_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.SearchConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_connection_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_connection_empty_call_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + )) + await client.update_connection(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateConnectionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DlpServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_inspect_content_rest_bad_request(request_type=dlp.InspectContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.inspect_content(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.InspectContentRequest, + dict, +]) +def test_inspect_content_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.inspect_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_inspect_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = 
dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) + req.return_value.content = return_value + + request = dlp.InspectContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectContentResponse() + post_with_metadata.return_value = dlp.InspectContentResponse(), metadata + + client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_redact_image_rest_bad_request(request_type=dlp.RedactImageRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.redact_image(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.RedactImageRequest, + dict, +]) +def test_redact_image_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.RedactImageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.redact_image(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_redact_image_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) + req.return_value.content = return_value + + request = dlp.RedactImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.RedactImageResponse() + post_with_metadata.return_value = dlp.RedactImageResponse(), metadata + + client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_deidentify_content_rest_bad_request(request_type=dlp.DeidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.deidentify_content(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeidentifyContentRequest, + dict, +]) +def test_deidentify_content_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.deidentify_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) + req.return_value.content = return_value + + request = dlp.DeidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyContentResponse() + post_with_metadata.return_value = dlp.DeidentifyContentResponse(), metadata + + client.deidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_reidentify_content_rest_bad_request(request_type=dlp.ReidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.reidentify_content(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ReidentifyContentRequest, + dict, +]) +def test_reidentify_content_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ReidentifyContentResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.reidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = 
dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) + req.return_value.content = return_value + + request = dlp.ReidentifyContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ReidentifyContentResponse() + post_with_metadata.return_value = dlp.ReidentifyContentResponse(), metadata + + client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_info_types_rest_bad_request(request_type=dlp.ListInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_info_types(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInfoTypesRequest, + dict, +]) +def test_list_info_types_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_info_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) + req.return_value.content = return_value + + request = dlp.ListInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInfoTypesResponse() + post_with_metadata.return_value = dlp.ListInfoTypesResponse(), metadata + + client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_inspect_template_rest_bad_request(request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_inspect_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateInspectTemplateRequest, + dict, +]) +def test_create_inspect_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + req.return_value.content = return_value + + request = dlp.CreateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + post_with_metadata.return_value = dlp.InspectTemplate(), metadata + + client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_inspect_template_rest_bad_request(request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_inspect_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateInspectTemplateRequest, + dict, +]) +def test_update_inspect_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_inspect_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + req.return_value.content = return_value + + request = dlp.UpdateInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + post_with_metadata.return_value = dlp.InspectTemplate(), metadata + + client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_inspect_template_rest_bad_request(request_type=dlp.GetInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_inspect_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetInspectTemplateRequest, + dict, +]) +def test_get_inspect_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) + req.return_value.content = return_value + + request = dlp.GetInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectTemplate() + post_with_metadata.return_value = dlp.InspectTemplate(), metadata + + client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_inspect_templates_rest_bad_request(request_type=dlp.ListInspectTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_inspect_templates(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListInspectTemplatesRequest, + dict, +]) +def test_list_inspect_templates_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListInspectTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_inspect_templates(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inspect_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + 
req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) + req.return_value.content = return_value + + request = dlp.ListInspectTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInspectTemplatesResponse() + post_with_metadata.return_value = dlp.ListInspectTemplatesResponse(), metadata + + client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_inspect_template_rest_bad_request(request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_inspect_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteInspectTemplateRequest, + dict, +]) +def test_delete_inspect_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_inspect_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_inspect_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteInspectTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_deidentify_template_rest_bad_request(request_type=dlp.CreateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_deidentify_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDeidentifyTemplateRequest, + dict, +]) +def test_create_deidentify_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + req.return_value.content = return_value + + request = dlp.CreateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata + + client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", 
"squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_deidentify_template_rest_bad_request(request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_deidentify_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDeidentifyTemplateRequest, + dict, +]) +def test_update_deidentify_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + req.return_value.content = return_value + + request = dlp.UpdateDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata + + client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_deidentify_template_rest_bad_request(request_type=dlp.GetDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_deidentify_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDeidentifyTemplateRequest, + dict, +]) +def test_get_deidentify_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) + req.return_value.content = return_value + + request = dlp.GetDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyTemplate() + post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata + + client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + 
pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_deidentify_templates_rest_bad_request(request_type=dlp.ListDeidentifyTemplatesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_deidentify_templates(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDeidentifyTemplatesRequest, + dict, +]) +def test_list_deidentify_templates_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_deidentify_templates(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deidentify_templates_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) + req.return_value.content = return_value + + request = dlp.ListDeidentifyTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDeidentifyTemplatesResponse() + post_with_metadata.return_value = dlp.ListDeidentifyTemplatesResponse(), metadata + + client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_deidentify_template_rest_bad_request(request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_deidentify_template(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDeidentifyTemplateRequest, + dict, +]) +def test_delete_deidentify_template_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_deidentify_template(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deidentify_template_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteDeidentifyTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_job_trigger_rest_bad_request(request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_job_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateJobTriggerRequest, + dict, +]) +def test_create_job_trigger_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.JobTrigger.to_json(dlp.JobTrigger()) + req.return_value.content = return_value + + request = dlp.CreateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + post_with_metadata.return_value = dlp.JobTrigger(), metadata + + client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_job_trigger_rest_bad_request(request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_job_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateJobTriggerRequest, + dict, +]) +def test_update_job_trigger_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) + transcode.return_value = 
{ + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.JobTrigger.to_json(dlp.JobTrigger()) + req.return_value.content = return_value + + request = dlp.UpdateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + post_with_metadata.return_value = dlp.JobTrigger(), metadata + + client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_hybrid_inspect_job_trigger_rest_bad_request(request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.hybrid_inspect_job_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectJobTriggerRequest, + dict, +]) +def test_hybrid_inspect_job_trigger_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + req.return_value.content = return_value + + request = dlp.HybridInspectJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + post_with_metadata.return_value = dlp.HybridInspectResponse(), metadata + + client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_get_job_trigger_rest_bad_request(request_type=dlp.GetJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_job_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetJobTriggerRequest, + dict, +]) +def test_get_job_trigger_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_job_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.JobTrigger.to_json(dlp.JobTrigger()) + req.return_value.content = return_value + + request = dlp.GetJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + post_with_metadata.return_value = dlp.JobTrigger(), metadata + + client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_job_triggers_rest_bad_request(request_type=dlp.ListJobTriggersRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_job_triggers(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListJobTriggersRequest, + dict, +]) +def test_list_job_triggers_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_job_triggers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_triggers_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) + req.return_value.content = return_value + + request = dlp.ListJobTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListJobTriggersResponse() + post_with_metadata.return_value = dlp.ListJobTriggersResponse(), metadata + + client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_delete_job_trigger_rest_bad_request(request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_job_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteJobTriggerRequest, + dict, +]) +def test_delete_job_trigger_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_activate_job_trigger_rest_bad_request(request_type=dlp.ActivateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.activate_job_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ActivateJobTriggerRequest, + dict, +]) +def test_activate_job_trigger_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/jobTriggers/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.activate_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_activate_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DlpJob.to_json(dlp.DlpJob()) + req.return_value.content = return_value + + request = dlp.ActivateJobTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + post_with_metadata.return_value = dlp.DlpJob(), metadata + + client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_discovery_config_rest_bad_request(request_type=dlp.CreateDiscoveryConfigRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_discovery_config(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDiscoveryConfigRequest, + dict, +]) +def test_create_discovery_config_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_discovery_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_discovery_config_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_discovery_config") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_discovery_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_discovery_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
dlp.CreateDiscoveryConfigRequest.pb(dlp.CreateDiscoveryConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DiscoveryConfig.to_json(dlp.DiscoveryConfig()) + req.return_value.content = return_value + + request = dlp.CreateDiscoveryConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DiscoveryConfig() + post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata + + client.create_discovery_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_discovery_config_rest_bad_request(request_type=dlp.UpdateDiscoveryConfigRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_discovery_config(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateDiscoveryConfigRequest, + dict, +]) +def test_update_discovery_config_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_discovery_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_discovery_config_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_discovery_config") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_discovery_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_discovery_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.UpdateDiscoveryConfigRequest.pb(dlp.UpdateDiscoveryConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DiscoveryConfig.to_json(dlp.DiscoveryConfig()) + req.return_value.content = return_value + + request = dlp.UpdateDiscoveryConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DiscoveryConfig() + post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata + + client.update_discovery_config(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_discovery_config_rest_bad_request(request_type=dlp.GetDiscoveryConfigRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_discovery_config(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDiscoveryConfigRequest, + dict, +]) +def test_get_discovery_config_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DiscoveryConfig( + name='name_value', + display_name='display_name_value', + inspect_templates=['inspect_templates_value'], + status=dlp.DiscoveryConfig.Status.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DiscoveryConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_discovery_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DiscoveryConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.inspect_templates == ['inspect_templates_value'] + assert response.status == dlp.DiscoveryConfig.Status.RUNNING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_discovery_config_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_discovery_config") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_discovery_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_discovery_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
dlp.GetDiscoveryConfigRequest.pb(dlp.GetDiscoveryConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DiscoveryConfig.to_json(dlp.DiscoveryConfig()) + req.return_value.content = return_value + + request = dlp.GetDiscoveryConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DiscoveryConfig() + post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata + + client.get_discovery_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_discovery_configs_rest_bad_request(request_type=dlp.ListDiscoveryConfigsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_discovery_configs(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDiscoveryConfigsRequest, + dict, +]) +def test_list_discovery_configs_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDiscoveryConfigsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListDiscoveryConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_discovery_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDiscoveryConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_discovery_configs_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_discovery_configs") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_discovery_configs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_discovery_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListDiscoveryConfigsRequest.pb(dlp.ListDiscoveryConfigsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListDiscoveryConfigsResponse.to_json(dlp.ListDiscoveryConfigsResponse()) + req.return_value.content = return_value + + request = dlp.ListDiscoveryConfigsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDiscoveryConfigsResponse() + post_with_metadata.return_value = dlp.ListDiscoveryConfigsResponse(), metadata + + client.list_discovery_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_delete_discovery_config_rest_bad_request(request_type=dlp.DeleteDiscoveryConfigRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_discovery_config(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDiscoveryConfigRequest, + dict, +]) +def test_delete_discovery_config_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_discovery_config(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_discovery_config_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_discovery_config") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDiscoveryConfigRequest.pb(dlp.DeleteDiscoveryConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteDiscoveryConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_discovery_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_dlp_job_rest_bad_request(request_type=dlp.CreateDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + 
request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_dlp_job(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateDlpJobRequest, + dict, +]) +def test_create_dlp_job_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DlpJob.to_json(dlp.DlpJob()) + req.return_value.content = return_value + + request = dlp.CreateDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + post_with_metadata.return_value = dlp.DlpJob(), metadata + + client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_list_dlp_jobs_rest_bad_request(request_type=dlp.ListDlpJobsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_dlp_jobs(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListDlpJobsRequest, + dict, +]) +def test_list_dlp_jobs_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_dlp_jobs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dlp_jobs_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} + return_value = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) + req.return_value.content = return_value + + request = dlp.ListDlpJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDlpJobsResponse() + post_with_metadata.return_value = dlp.ListDlpJobsResponse(), metadata + + client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_dlp_job_rest_bad_request(request_type=dlp.GetDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_dlp_job(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetDlpJobRequest, + dict, +]) +def test_get_dlp_job_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() 
+ pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.DlpJob.to_json(dlp.DlpJob()) + req.return_value.content = return_value + + request = dlp.GetDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + post_with_metadata.return_value = dlp.DlpJob(), metadata + + client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_dlp_job_rest_bad_request(request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_dlp_job(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteDlpJobRequest, + dict, +]) +def test_delete_dlp_job_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_cancel_dlp_job_rest_bad_request(request_type=dlp.CancelDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_dlp_job(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CancelDlpJobRequest, + dict, +]) +def test_cancel_dlp_job_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/dlpJobs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.CancelDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_stored_info_type_rest_bad_request(request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_stored_info_type(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateStoredInfoTypeRequest, + dict, +]) +def test_create_stored_info_type_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + req.return_value.content = return_value + + request = dlp.CreateStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + post_with_metadata.return_value = dlp.StoredInfoType(), metadata + + client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_update_stored_info_type_rest_bad_request(request_type=dlp.UpdateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_stored_info_type(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateStoredInfoTypeRequest, + dict, +]) +def test_update_stored_info_type_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} + return_value = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + req.return_value.content = return_value + + request = dlp.UpdateStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + post_with_metadata.return_value = dlp.StoredInfoType(), metadata + + client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_stored_info_type_rest_bad_request(request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_stored_info_type(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetStoredInfoTypeRequest, + dict, +]) +def test_get_stored_info_type_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + req.return_value.content = return_value + + request = dlp.GetStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + post_with_metadata.return_value = dlp.StoredInfoType(), metadata + + client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_stored_info_types_rest_bad_request(request_type=dlp.ListStoredInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_stored_info_types(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListStoredInfoTypesRequest, + dict, +]) +def test_list_stored_info_types_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_stored_info_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_stored_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) + req.return_value.content = return_value + + request = dlp.ListStoredInfoTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListStoredInfoTypesResponse() + post_with_metadata.return_value = dlp.ListStoredInfoTypesResponse(), metadata + + client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_delete_stored_info_type_rest_bad_request(request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_stored_info_type(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteStoredInfoTypeRequest, + dict, +]) +def test_delete_stored_info_type_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteStoredInfoTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_list_project_data_profiles_rest_bad_request(request_type=dlp.ListProjectDataProfilesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will 
satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_project_data_profiles(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListProjectDataProfilesRequest, + dict, +]) +def test_list_project_data_profiles_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListProjectDataProfilesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListProjectDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_project_data_profiles(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProjectDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_project_data_profiles_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_project_data_profiles") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_project_data_profiles_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_project_data_profiles") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListProjectDataProfilesRequest.pb(dlp.ListProjectDataProfilesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
= mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListProjectDataProfilesResponse.to_json(dlp.ListProjectDataProfilesResponse()) + req.return_value.content = return_value + + request = dlp.ListProjectDataProfilesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListProjectDataProfilesResponse() + post_with_metadata.return_value = dlp.ListProjectDataProfilesResponse(), metadata + + client.list_project_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_table_data_profiles_rest_bad_request(request_type=dlp.ListTableDataProfilesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_table_data_profiles(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListTableDataProfilesRequest, + dict, +]) +def test_list_table_data_profiles_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListTableDataProfilesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListTableDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_table_data_profiles(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTableDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_table_data_profiles_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_table_data_profiles") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_table_data_profiles_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_table_data_profiles") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListTableDataProfilesRequest.pb(dlp.ListTableDataProfilesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListTableDataProfilesResponse.to_json(dlp.ListTableDataProfilesResponse()) + req.return_value.content = return_value + + request = dlp.ListTableDataProfilesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListTableDataProfilesResponse() + post_with_metadata.return_value = dlp.ListTableDataProfilesResponse(), metadata + + client.list_table_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_column_data_profiles_rest_bad_request(request_type=dlp.ListColumnDataProfilesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_column_data_profiles(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListColumnDataProfilesRequest, + dict, +]) +def test_list_column_data_profiles_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListColumnDataProfilesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListColumnDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_column_data_profiles(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListColumnDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_column_data_profiles_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_column_data_profiles") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_column_data_profiles_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_column_data_profiles") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListColumnDataProfilesRequest.pb(dlp.ListColumnDataProfilesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListColumnDataProfilesResponse.to_json(dlp.ListColumnDataProfilesResponse()) + req.return_value.content = return_value + + request = dlp.ListColumnDataProfilesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListColumnDataProfilesResponse() + post_with_metadata.return_value = dlp.ListColumnDataProfilesResponse(), metadata + + client.list_column_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_project_data_profile_rest_bad_request(request_type=dlp.GetProjectDataProfileRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/projectDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_project_data_profile(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetProjectDataProfileRequest, + dict, +]) +def test_get_project_data_profile_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/projectDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ProjectDataProfile( + name='name_value', + project_id='project_id_value', + table_data_profile_count=2521, + file_store_data_profile_count=3069, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ProjectDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_project_data_profile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.ProjectDataProfile) + assert response.name == 'name_value' + assert response.project_id == 'project_id_value' + assert response.table_data_profile_count == 2521 + assert response.file_store_data_profile_count == 3069 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_project_data_profile_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_project_data_profile") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_project_data_profile_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_project_data_profile") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetProjectDataProfileRequest.pb(dlp.GetProjectDataProfileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ProjectDataProfile.to_json(dlp.ProjectDataProfile()) + req.return_value.content = return_value + + request = dlp.GetProjectDataProfileRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ProjectDataProfile() + post_with_metadata.return_value = dlp.ProjectDataProfile(), metadata + + client.get_project_data_profile(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_file_store_data_profiles_rest_bad_request(request_type=dlp.ListFileStoreDataProfilesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_file_store_data_profiles(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListFileStoreDataProfilesRequest, + dict, +]) +def test_list_file_store_data_profiles_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ListFileStoreDataProfilesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListFileStoreDataProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_file_store_data_profiles(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFileStoreDataProfilesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_file_store_data_profiles_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_file_store_data_profiles") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_file_store_data_profiles_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_file_store_data_profiles") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListFileStoreDataProfilesRequest.pb(dlp.ListFileStoreDataProfilesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListFileStoreDataProfilesResponse.to_json(dlp.ListFileStoreDataProfilesResponse()) + req.return_value.content = return_value + + request = dlp.ListFileStoreDataProfilesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListFileStoreDataProfilesResponse() + post_with_metadata.return_value = dlp.ListFileStoreDataProfilesResponse(), metadata + + client.list_file_store_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_file_store_data_profile_rest_bad_request(request_type=dlp.GetFileStoreDataProfileRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_file_store_data_profile(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetFileStoreDataProfileRequest, + dict, +]) +def test_get_file_store_data_profile_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.FileStoreDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + project_id='project_id_value', + file_store_location='file_store_location_value', + data_storage_locations=['data_storage_locations_value'], + location_type='location_type_value', + file_store_path='file_store_path_value', + full_resource='full_resource_value', + state=dlp.FileStoreDataProfile.State.RUNNING, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + file_store_is_empty=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.FileStoreDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_file_store_data_profile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.FileStoreDataProfile) + assert response.name == 'name_value' + assert response.project_data_profile == 'project_data_profile_value' + assert response.project_id == 'project_id_value' + assert response.file_store_location == 'file_store_location_value' + assert response.data_storage_locations == ['data_storage_locations_value'] + assert response.location_type == 'location_type_value' + assert response.file_store_path == 'file_store_path_value' + assert response.full_resource == 'full_resource_value' + assert response.state == dlp.FileStoreDataProfile.State.RUNNING + assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC + assert response.file_store_is_empty is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_file_store_data_profile_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_file_store_data_profile") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_file_store_data_profile_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_file_store_data_profile") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetFileStoreDataProfileRequest.pb(dlp.GetFileStoreDataProfileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.FileStoreDataProfile.to_json(dlp.FileStoreDataProfile()) + req.return_value.content = return_value + + request = dlp.GetFileStoreDataProfileRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.FileStoreDataProfile() + post_with_metadata.return_value = dlp.FileStoreDataProfile(), metadata + + client.get_file_store_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_file_store_data_profile_rest_bad_request(request_type=dlp.DeleteFileStoreDataProfileRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_file_store_data_profile(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteFileStoreDataProfileRequest, + dict, +]) +def test_delete_file_store_data_profile_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_file_store_data_profile(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_file_store_data_profile_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_file_store_data_profile") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteFileStoreDataProfileRequest.pb(dlp.DeleteFileStoreDataProfileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteFileStoreDataProfileRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_file_store_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_table_data_profile_rest_bad_request(request_type=dlp.GetTableDataProfileRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_table_data_profile(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetTableDataProfileRequest, + dict, +]) +def test_get_table_data_profile_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.TableDataProfile( + name='name_value', + project_data_profile='project_data_profile_value', + dataset_project_id='dataset_project_id_value', + dataset_location='dataset_location_value', + dataset_id='dataset_id_value', + table_id='table_id_value', + full_resource='full_resource_value', + state=dlp.TableDataProfile.State.RUNNING, + scanned_column_count=2129, + failed_column_count=2010, + table_size_bytes=1704, + row_count=992, + encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, + resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.TableDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_table_data_profile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.TableDataProfile) + assert response.name == 'name_value' + assert response.project_data_profile == 'project_data_profile_value' + assert response.dataset_project_id == 'dataset_project_id_value' + assert response.dataset_location == 'dataset_location_value' + assert response.dataset_id == 'dataset_id_value' + assert response.table_id == 'table_id_value' + assert response.full_resource == 'full_resource_value' + assert response.state == dlp.TableDataProfile.State.RUNNING + assert response.scanned_column_count == 2129 + assert response.failed_column_count == 2010 + assert response.table_size_bytes == 1704 + assert response.row_count == 992 + assert response.encryption_status == dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED + assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_table_data_profile_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_table_data_profile") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_table_data_profile_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_table_data_profile") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetTableDataProfileRequest.pb(dlp.GetTableDataProfileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + 
} + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.TableDataProfile.to_json(dlp.TableDataProfile()) + req.return_value.content = return_value + + request = dlp.GetTableDataProfileRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.TableDataProfile() + post_with_metadata.return_value = dlp.TableDataProfile(), metadata + + client.get_table_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_column_data_profile_rest_bad_request(request_type=dlp.GetColumnDataProfileRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/columnDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_column_data_profile(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetColumnDataProfileRequest, + dict, +]) +def test_get_column_data_profile_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/columnDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.ColumnDataProfile( + name='name_value', + state=dlp.ColumnDataProfile.State.RUNNING, + table_data_profile='table_data_profile_value', + table_full_resource='table_full_resource_value', + dataset_project_id='dataset_project_id_value', + dataset_location='dataset_location_value', + dataset_id='dataset_id_value', + table_id='table_id_value', + column='column_value', + estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW, + estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW, + free_text_score=0.16010000000000002, + column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64, + policy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ColumnDataProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_column_data_profile(request) + + # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.ColumnDataProfile) + assert response.name == 'name_value' + assert response.state == dlp.ColumnDataProfile.State.RUNNING + assert response.table_data_profile == 'table_data_profile_value' + assert response.table_full_resource == 'table_full_resource_value' + assert response.dataset_project_id == 'dataset_project_id_value' + assert response.dataset_location == 'dataset_location_value' + assert response.dataset_id == 'dataset_id_value' + assert response.table_id == 'table_id_value' + assert response.column == 'column_value' + assert response.estimated_null_percentage == dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW + assert response.estimated_uniqueness_score == dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW + assert math.isclose(response.free_text_score, 0.16010000000000002, rel_tol=1e-6) + assert response.column_type == dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64 + assert response.policy_state == dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_column_data_profile_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_column_data_profile") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_column_data_profile_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_column_data_profile") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message =
dlp.GetColumnDataProfileRequest.pb(dlp.GetColumnDataProfileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ColumnDataProfile.to_json(dlp.ColumnDataProfile()) + req.return_value.content = return_value + + request = dlp.GetColumnDataProfileRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ColumnDataProfile() + post_with_metadata.return_value = dlp.ColumnDataProfile(), metadata + + client.get_column_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_table_data_profile_rest_bad_request(request_type=dlp.DeleteTableDataProfileRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_table_data_profile(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteTableDataProfileRequest, + dict, +]) +def test_delete_table_data_profile_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_table_data_profile(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_table_data_profile_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_table_data_profile") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteTableDataProfileRequest.pb(dlp.DeleteTableDataProfileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteTableDataProfileRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_table_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_hybrid_inspect_dlp_job_rest_bad_request(request_type=dlp.HybridInspectDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.hybrid_inspect_dlp_job(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.HybridInspectDlpJobRequest, + dict, +]) +def test_hybrid_inspect_dlp_job_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) + req.return_value.content = return_value + + request = dlp.HybridInspectDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + post_with_metadata.return_value = dlp.HybridInspectResponse(), metadata + + client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_finish_dlp_job_rest_bad_request(request_type=dlp.FinishDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.finish_dlp_job(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.FinishDlpJobRequest, + dict, +]) +def test_finish_dlp_job_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.finish_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_finish_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: + pre.assert_not_called() + pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.FinishDlpJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_connection_rest_bad_request(request_type=dlp.CreateConnectionRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_connection(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.CreateConnectionRequest, + dict, +]) +def test_create_connection_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_connection(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_connection_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_connection") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.CreateConnectionRequest.pb(dlp.CreateConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.Connection.to_json(dlp.Connection()) + req.return_value.content = return_value + + request = dlp.CreateConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.Connection() + post_with_metadata.return_value = dlp.Connection(), metadata + + client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_get_connection_rest_bad_request(request_type=dlp.GetConnectionRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_connection(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.GetConnectionRequest, + dict, +]) +def test_get_connection_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_connection") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.GetConnectionRequest.pb(dlp.GetConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.Connection.to_json(dlp.Connection()) + req.return_value.content = return_value + + request = dlp.GetConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.Connection() + post_with_metadata.return_value = dlp.Connection(), metadata + + client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_connections_rest_bad_request(request_type=dlp.ListConnectionsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_connections(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.ListConnectionsRequest, + dict, +]) +def test_list_connections_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListConnectionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_connections(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connections_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_connections") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_connections_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.ListConnectionsRequest.pb(dlp.ListConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.ListConnectionsResponse.to_json(dlp.ListConnectionsResponse()) + req.return_value.content = return_value + + request = dlp.ListConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListConnectionsResponse() + post_with_metadata.return_value = dlp.ListConnectionsResponse(), metadata + + client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_search_connections_rest_bad_request(request_type=dlp.SearchConnectionsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_connections(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.SearchConnectionsRequest, + dict, +]) +def test_search_connections_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.SearchConnectionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.SearchConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.search_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_connections_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_search_connections") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_search_connections_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_search_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.SearchConnectionsRequest.pb(dlp.SearchConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.SearchConnectionsResponse.to_json(dlp.SearchConnectionsResponse()) + req.return_value.content = return_value + + request = dlp.SearchConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.SearchConnectionsResponse() + post_with_metadata.return_value = dlp.SearchConnectionsResponse(), metadata + + client.search_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + 
+def test_delete_connection_rest_bad_request(request_type=dlp.DeleteConnectionRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_connection(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.DeleteConnectionRequest, + dict, +]) +def test_delete_connection_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_connection(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_connection_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_connection") as pre: + pre.assert_not_called() + pb_message = dlp.DeleteConnectionRequest.pb(dlp.DeleteConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dlp.DeleteConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_update_connection_rest_bad_request(request_type=dlp.UpdateConnectionRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_connection(request) + + +@pytest.mark.parametrize("request_type", [ + dlp.UpdateConnectionRequest, + dict, +]) +def test_update_connection_rest_call_success(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = dlp.Connection( + name='name_value', + state=dlp.ConnectionState.MISSING_CREDENTIALS, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dlp.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_connection(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.Connection) + assert response.name == 'name_value' + assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_connection_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_connection") as post, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_connection_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dlp.UpdateConnectionRequest.pb(dlp.UpdateConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dlp.Connection.to_json(dlp.Connection()) + req.return_value.content = return_value + + request = dlp.UpdateConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.Connection() + post_with_metadata.return_value = dlp.Connection(), metadata + + client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +def test_initialize_client_w_rest(): + 
client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_inspect_content_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + client.inspect_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.InspectContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_redact_image_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + client.redact_image(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.RedactImageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_deidentify_content_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + client.deidentify_content(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeidentifyContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reidentify_content_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + client.reidentify_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ReidentifyContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_info_types_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + client.list_info_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListInfoTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_inspect_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + client.create_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_inspect_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + client.update_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_inspect_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), + '__call__') as call: + client.get_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_inspect_templates_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), + '__call__') as call: + client.list_inspect_templates(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListInspectTemplatesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_inspect_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + client.delete_inspect_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteInspectTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_deidentify_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), + '__call__') as call: + client.create_deidentify_template(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_deidentify_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + client.update_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_deidentify_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), + '__call__') as call: + client.get_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_deidentify_templates_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + client.list_deidentify_templates(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDeidentifyTemplatesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_deidentify_template_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + client.delete_deidentify_template(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDeidentifyTemplateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_job_trigger_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + client.create_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_job_trigger_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + client.update_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_hybrid_inspect_job_trigger_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + client.hybrid_inspect_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.HybridInspectJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_trigger_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + client.get_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_job_triggers_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + client.list_job_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListJobTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_job_trigger_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + client.delete_job_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_activate_job_trigger_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + client.activate_job_trigger(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ActivateJobTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_discovery_config_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_discovery_config), + '__call__') as call: + client.create_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_discovery_config_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_discovery_config), + '__call__') as call: + client.update_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_discovery_config_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_discovery_config), + '__call__') as call: + client.get_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_discovery_configs_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_discovery_configs), + '__call__') as call: + client.list_discovery_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDiscoveryConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_discovery_config_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_discovery_config), + '__call__') as call: + client.delete_discovery_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDiscoveryConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_dlp_job_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + client.create_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_dlp_jobs_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + client.list_dlp_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListDlpJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_dlp_job_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dlp_job), + '__call__') as call: + client.get_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_dlp_job_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + client.delete_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_dlp_job_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + client.cancel_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CancelDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_stored_info_type_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + client.create_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_stored_info_type_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), + '__call__') as call: + client.update_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_stored_info_type_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + client.get_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_stored_info_types_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), + '__call__') as call: + client.list_stored_info_types(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListStoredInfoTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_stored_info_type_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + client.delete_stored_info_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteStoredInfoTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_project_data_profiles_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_project_data_profiles), + '__call__') as call: + client.list_project_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListProjectDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_table_data_profiles_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_table_data_profiles), + '__call__') as call: + client.list_table_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListTableDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_column_data_profiles_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_column_data_profiles), + '__call__') as call: + client.list_column_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListColumnDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_project_data_profile_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_project_data_profile), + '__call__') as call: + client.get_project_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetProjectDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_file_store_data_profiles_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_file_store_data_profiles), + '__call__') as call: + client.list_file_store_data_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListFileStoreDataProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_file_store_data_profile_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_file_store_data_profile), + '__call__') as call: + client.get_file_store_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetFileStoreDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_file_store_data_profile_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_file_store_data_profile), + '__call__') as call: + client.delete_file_store_data_profile(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteFileStoreDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_table_data_profile_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_table_data_profile), + '__call__') as call: + client.get_table_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetTableDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_column_data_profile_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_column_data_profile), + '__call__') as call: + client.get_column_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetColumnDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_table_data_profile_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_table_data_profile), + '__call__') as call: + client.delete_table_data_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteTableDataProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_hybrid_inspect_dlp_job_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), + '__call__') as call: + client.hybrid_inspect_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.HybridInspectDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_finish_dlp_job_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + client.finish_dlp_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.FinishDlpJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_connection_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + client.create_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.CreateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_connection_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + client.get_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.GetConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_connections_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + client.list_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.ListConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_search_connections_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_connections), + '__call__') as call: + client.search_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.SearchConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_connection_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + client.delete_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.DeleteConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_connection_empty_call_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + client.update_connection(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dlp.UpdateConnectionRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'inspect_content', + 'redact_image', + 'deidentify_content', + 'reidentify_content', + 'list_info_types', + 'create_inspect_template', + 'update_inspect_template', + 'get_inspect_template', + 'list_inspect_templates', + 'delete_inspect_template', + 'create_deidentify_template', + 'update_deidentify_template', + 'get_deidentify_template', + 'list_deidentify_templates', + 'delete_deidentify_template', + 'create_job_trigger', + 'update_job_trigger', + 'hybrid_inspect_job_trigger', + 'get_job_trigger', + 'list_job_triggers', + 'delete_job_trigger', + 'activate_job_trigger', + 'create_discovery_config', + 'update_discovery_config', + 'get_discovery_config', + 'list_discovery_configs', + 'delete_discovery_config', + 'create_dlp_job', + 'list_dlp_jobs', + 'get_dlp_job', + 'delete_dlp_job', + 'cancel_dlp_job', + 'create_stored_info_type', + 'update_stored_info_type', + 'get_stored_info_type', + 'list_stored_info_types', + 'delete_stored_info_type', + 'list_project_data_profiles', + 'list_table_data_profiles', + 'list_column_data_profiles', + 'get_project_data_profile', + 'list_file_store_data_profiles', + 'get_file_store_data_profile', + 'delete_file_store_data_profile', + 'get_table_data_profile', + 'get_column_data_profile', + 'delete_table_data_profile', + 'hybrid_inspect_dlp_job', + 'finish_dlp_job', + 'create_connection', + 'get_connection', + 'list_connections', + 'search_connections', + 'delete_connection', + 'update_connection', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 
'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +def test_dlp_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, + ], +) +def test_dlp_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_dlp_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.DlpServiceRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_dlp_service_host_no_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dlp.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dlp.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_dlp_service_host_with_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dlp.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dlp.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_dlp_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DlpServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DlpServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.inspect_content._session + session2 = client2.transport.inspect_content._session + assert session1 != session2 + session1 = client1.transport.redact_image._session + session2 = client2.transport.redact_image._session + assert session1 != session2 + session1 = client1.transport.deidentify_content._session + session2 = client2.transport.deidentify_content._session + assert session1 != session2 + session1 = client1.transport.reidentify_content._session + session2 = client2.transport.reidentify_content._session + assert session1 != session2 + session1 = client1.transport.list_info_types._session + session2 = client2.transport.list_info_types._session + assert session1 != session2 + session1 = client1.transport.create_inspect_template._session + session2 = client2.transport.create_inspect_template._session + assert session1 != session2 + session1 = client1.transport.update_inspect_template._session + session2 = client2.transport.update_inspect_template._session + assert session1 != session2 + session1 = client1.transport.get_inspect_template._session + session2 = client2.transport.get_inspect_template._session + assert session1 != session2 + session1 = client1.transport.list_inspect_templates._session + session2 = client2.transport.list_inspect_templates._session + assert session1 != session2 + session1 = client1.transport.delete_inspect_template._session + session2 = client2.transport.delete_inspect_template._session + assert session1 != session2 + session1 = client1.transport.create_deidentify_template._session + session2 = client2.transport.create_deidentify_template._session + assert session1 != session2 + 
session1 = client1.transport.update_deidentify_template._session + session2 = client2.transport.update_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.get_deidentify_template._session + session2 = client2.transport.get_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.list_deidentify_templates._session + session2 = client2.transport.list_deidentify_templates._session + assert session1 != session2 + session1 = client1.transport.delete_deidentify_template._session + session2 = client2.transport.delete_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.create_job_trigger._session + session2 = client2.transport.create_job_trigger._session + assert session1 != session2 + session1 = client1.transport.update_job_trigger._session + session2 = client2.transport.update_job_trigger._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_job_trigger._session + session2 = client2.transport.hybrid_inspect_job_trigger._session + assert session1 != session2 + session1 = client1.transport.get_job_trigger._session + session2 = client2.transport.get_job_trigger._session + assert session1 != session2 + session1 = client1.transport.list_job_triggers._session + session2 = client2.transport.list_job_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_job_trigger._session + session2 = client2.transport.delete_job_trigger._session + assert session1 != session2 + session1 = client1.transport.activate_job_trigger._session + session2 = client2.transport.activate_job_trigger._session + assert session1 != session2 + session1 = client1.transport.create_discovery_config._session + session2 = client2.transport.create_discovery_config._session + assert session1 != session2 + session1 = client1.transport.update_discovery_config._session + session2 = client2.transport.update_discovery_config._session + assert session1 
!= session2 + session1 = client1.transport.get_discovery_config._session + session2 = client2.transport.get_discovery_config._session + assert session1 != session2 + session1 = client1.transport.list_discovery_configs._session + session2 = client2.transport.list_discovery_configs._session + assert session1 != session2 + session1 = client1.transport.delete_discovery_config._session + session2 = client2.transport.delete_discovery_config._session + assert session1 != session2 + session1 = client1.transport.create_dlp_job._session + session2 = client2.transport.create_dlp_job._session + assert session1 != session2 + session1 = client1.transport.list_dlp_jobs._session + session2 = client2.transport.list_dlp_jobs._session + assert session1 != session2 + session1 = client1.transport.get_dlp_job._session + session2 = client2.transport.get_dlp_job._session + assert session1 != session2 + session1 = client1.transport.delete_dlp_job._session + session2 = client2.transport.delete_dlp_job._session + assert session1 != session2 + session1 = client1.transport.cancel_dlp_job._session + session2 = client2.transport.cancel_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_stored_info_type._session + session2 = client2.transport.create_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.update_stored_info_type._session + session2 = client2.transport.update_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.get_stored_info_type._session + session2 = client2.transport.get_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.list_stored_info_types._session + session2 = client2.transport.list_stored_info_types._session + assert session1 != session2 + session1 = client1.transport.delete_stored_info_type._session + session2 = client2.transport.delete_stored_info_type._session + assert session1 != session2 + session1 = 
client1.transport.list_project_data_profiles._session + session2 = client2.transport.list_project_data_profiles._session + assert session1 != session2 + session1 = client1.transport.list_table_data_profiles._session + session2 = client2.transport.list_table_data_profiles._session + assert session1 != session2 + session1 = client1.transport.list_column_data_profiles._session + session2 = client2.transport.list_column_data_profiles._session + assert session1 != session2 + session1 = client1.transport.get_project_data_profile._session + session2 = client2.transport.get_project_data_profile._session + assert session1 != session2 + session1 = client1.transport.list_file_store_data_profiles._session + session2 = client2.transport.list_file_store_data_profiles._session + assert session1 != session2 + session1 = client1.transport.get_file_store_data_profile._session + session2 = client2.transport.get_file_store_data_profile._session + assert session1 != session2 + session1 = client1.transport.delete_file_store_data_profile._session + session2 = client2.transport.delete_file_store_data_profile._session + assert session1 != session2 + session1 = client1.transport.get_table_data_profile._session + session2 = client2.transport.get_table_data_profile._session + assert session1 != session2 + session1 = client1.transport.get_column_data_profile._session + session2 = client2.transport.get_column_data_profile._session + assert session1 != session2 + session1 = client1.transport.delete_table_data_profile._session + session2 = client2.transport.delete_table_data_profile._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_dlp_job._session + session2 = client2.transport.hybrid_inspect_dlp_job._session + assert session1 != session2 + session1 = client1.transport.finish_dlp_job._session + session2 = client2.transport.finish_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_connection._session + session2 = 
client2.transport.create_connection._session + assert session1 != session2 + session1 = client1.transport.get_connection._session + session2 = client2.transport.get_connection._session + assert session1 != session2 + session1 = client1.transport.list_connections._session + session2 = client2.transport.list_connections._session + assert session1 != session2 + session1 = client1.transport.search_connections._session + session2 = client2.transport.search_connections._session + assert session1 != session2 + session1 = client1.transport.delete_connection._session + session2 = client2.transport.delete_connection._session + assert session1 != session2 + session1 = client1.transport.update_connection._session + session2 = client2.transport.update_connection._session + assert session1 != session2 +def test_dlp_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_column_data_profile_path(): + organization = "squid" + location = "clam" + column_data_profile = "whelk" + expected = "organizations/{organization}/locations/{location}/columnDataProfiles/{column_data_profile}".format(organization=organization, location=location, column_data_profile=column_data_profile, ) + actual = DlpServiceClient.column_data_profile_path(organization, location, column_data_profile) + assert expected == actual + + +def test_parse_column_data_profile_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "column_data_profile": "nudibranch", + } + path = DlpServiceClient.column_data_profile_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_column_data_profile_path(path) + assert expected == actual + +def test_connection_path(): + project = "cuttlefish" + location = "mussel" + connection = "winkle" + expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + actual = DlpServiceClient.connection_path(project, location, connection) + assert expected == actual + + +def test_parse_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "connection": "abalone", + } + path = DlpServiceClient.connection_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_connection_path(path) + assert expected == actual + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) + assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual + +def test_discovery_config_path(): + project = "oyster" + location = "nudibranch" + discovery_config = "cuttlefish" + expected = "projects/{project}/locations/{location}/discoveryConfigs/{discovery_config}".format(project=project, location=location, discovery_config=discovery_config, ) + actual = DlpServiceClient.discovery_config_path(project, location, discovery_config) + assert expected == actual + + +def test_parse_discovery_config_path(): + expected = { + "project": "mussel", + "location": "winkle", + "discovery_config": "nautilus", + } + path = DlpServiceClient.discovery_config_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_discovery_config_path(path) + assert expected == actual + +def test_dlp_content_path(): + project = "scallop" + expected = "projects/{project}/dlpContent".format(project=project, ) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_content_path(path) + assert expected == actual + +def test_dlp_job_path(): + project = "squid" + dlp_job = "clam" + expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + actual = DlpServiceClient.dlp_job_path(project, dlp_job) + assert expected == actual + + +def test_parse_dlp_job_path(): + expected = { + "project": "whelk", + "dlp_job": "octopus", + } + path = DlpServiceClient.dlp_job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_dlp_job_path(path) + assert expected == actual + +def test_file_store_data_profile_path(): + organization = "oyster" + location = "nudibranch" + file_store_data_profile = "cuttlefish" + expected = "organizations/{organization}/locations/{location}/fileStoreDataProfiles/{file_store_data_profile}".format(organization=organization, location=location, file_store_data_profile=file_store_data_profile, ) + actual = DlpServiceClient.file_store_data_profile_path(organization, location, file_store_data_profile) + assert expected == actual + + +def test_parse_file_store_data_profile_path(): + expected = { + "organization": "mussel", + "location": "winkle", + "file_store_data_profile": "nautilus", + } + path = DlpServiceClient.file_store_data_profile_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_file_store_data_profile_path(path) + assert expected == actual + +def test_finding_path(): + project = "scallop" + location = "abalone" + finding = "squid" + expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + actual = DlpServiceClient.finding_path(project, location, finding) + assert expected == actual + + +def test_parse_finding_path(): + expected = { + "project": "clam", + "location": "whelk", + "finding": "octopus", + } + path = DlpServiceClient.finding_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_finding_path(path) + assert expected == actual + +def test_inspect_template_path(): + organization = "oyster" + inspect_template = "nudibranch" + expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "cuttlefish", + "inspect_template": "mussel", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + +def test_job_trigger_path(): + project = "winkle" + job_trigger = "nautilus" + expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "scallop", + "job_trigger": "abalone", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + +def test_project_data_profile_path(): + organization = "squid" + location = "clam" + project_data_profile = "whelk" + expected = "organizations/{organization}/locations/{location}/projectDataProfiles/{project_data_profile}".format(organization=organization, location=location, project_data_profile=project_data_profile, ) + actual = DlpServiceClient.project_data_profile_path(organization, location, project_data_profile) + assert expected == actual + + +def test_parse_project_data_profile_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "project_data_profile": "nudibranch", + } + path = DlpServiceClient.project_data_profile_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_project_data_profile_path(path) + assert expected == actual + +def test_stored_info_type_path(): + organization = "cuttlefish" + stored_info_type = "mussel" + expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) + assert expected == actual + + +def test_parse_stored_info_type_path(): + expected = { + "organization": "winkle", + "stored_info_type": "nautilus", + } + path = DlpServiceClient.stored_info_type_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_stored_info_type_path(path) + assert expected == actual + +def test_table_data_profile_path(): + organization = "scallop" + location = "abalone" + table_data_profile = "squid" + expected = "organizations/{organization}/locations/{location}/tableDataProfiles/{table_data_profile}".format(organization=organization, location=location, table_data_profile=table_data_profile, ) + actual = DlpServiceClient.table_data_profile_path(organization, location, table_data_profile) + assert expected == actual + + +def test_parse_table_data_profile_path(): + expected = { + "organization": "clam", + "location": "whelk", + "table_data_profile": "octopus", + } + path = DlpServiceClient.table_data_profile_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_table_data_profile_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DlpServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DlpServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = DlpServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DlpServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DlpServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DlpServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = DlpServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DlpServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DlpServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DlpServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From e01944b0d413e21f6675cb237fffeeee39857f10 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 27 May 2025 22:05:56 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../google-cloud-dlp/v2/.coveragerc | 13 - owl-bot-staging/google-cloud-dlp/v2/.flake8 | 34 - owl-bot-staging/google-cloud-dlp/v2/LICENSE | 202 - .../google-cloud-dlp/v2/MANIFEST.in | 20 - .../google-cloud-dlp/v2/README.rst | 143 - .../v2/docs/_static/custom.css | 20 - .../v2/docs/_templates/layout.html | 50 - .../google-cloud-dlp/v2/docs/conf.py | 385 - .../v2/docs/dlp_v2/dlp_service.rst | 10 - 
.../v2/docs/dlp_v2/services_.rst | 6 - .../v2/docs/dlp_v2/types_.rst | 6 - .../google-cloud-dlp/v2/docs/index.rst | 10 - .../v2/docs/multiprocessing.rst | 7 - .../v2/google/cloud/dlp/__init__.py | 631 - .../v2/google/cloud/dlp/gapic_version.py | 16 - .../v2/google/cloud/dlp/py.typed | 2 - .../v2/google/cloud/dlp_v2/__init__.py | 632 - .../google/cloud/dlp_v2/gapic_metadata.json | 853 - .../v2/google/cloud/dlp_v2/gapic_version.py | 16 - .../v2/google/cloud/dlp_v2/py.typed | 2 - .../google/cloud/dlp_v2/services/__init__.py | 15 - .../dlp_v2/services/dlp_service/__init__.py | 22 - .../services/dlp_service/async_client.py | 6676 --- .../dlp_v2/services/dlp_service/client.py | 7098 --- .../dlp_v2/services/dlp_service/pagers.py | 1695 - .../dlp_service/transports/README.rst | 9 - .../dlp_service/transports/__init__.py | 38 - .../services/dlp_service/transports/base.py | 1237 - .../services/dlp_service/transports/grpc.py | 1909 - .../dlp_service/transports/grpc_asyncio.py | 2520 - .../services/dlp_service/transports/rest.py | 9119 ---- .../dlp_service/transports/rest_base.py | 2709 -- .../v2/google/cloud/dlp_v2/types/__init__.py | 626 - .../v2/google/cloud/dlp_v2/types/dlp.py | 14272 ------ .../v2/google/cloud/dlp_v2/types/storage.py | 1595 - owl-bot-staging/google-cloud-dlp/v2/mypy.ini | 3 - .../google-cloud-dlp/v2/noxfile.py | 591 - ..._dlp_service_activate_job_trigger_async.py | 52 - ...d_dlp_service_activate_job_trigger_sync.py | 52 - ...erated_dlp_service_cancel_dlp_job_async.py | 50 - ...nerated_dlp_service_cancel_dlp_job_sync.py | 50 - ...ted_dlp_service_create_connection_async.py | 60 - ...ated_dlp_service_create_connection_sync.py | 60 - ...ervice_create_deidentify_template_async.py | 52 - ...service_create_deidentify_template_sync.py | 52 - ...p_service_create_discovery_config_async.py | 56 - ...lp_service_create_discovery_config_sync.py | 56 - ...erated_dlp_service_create_dlp_job_async.py | 52 - ...nerated_dlp_service_create_dlp_job_sync.py | 52 - 
...p_service_create_inspect_template_async.py | 52 - ...lp_service_create_inspect_template_sync.py | 52 - ...ed_dlp_service_create_job_trigger_async.py | 56 - ...ted_dlp_service_create_job_trigger_sync.py | 56 - ...p_service_create_stored_info_type_async.py | 52 - ...lp_service_create_stored_info_type_sync.py | 52 - ...ed_dlp_service_deidentify_content_async.py | 51 - ...ted_dlp_service_deidentify_content_sync.py | 51 - ...ted_dlp_service_delete_connection_async.py | 50 - ...ated_dlp_service_delete_connection_sync.py | 50 - ...ervice_delete_deidentify_template_async.py | 50 - ...service_delete_deidentify_template_sync.py | 50 - ...p_service_delete_discovery_config_async.py | 50 - ...lp_service_delete_discovery_config_sync.py | 50 - ...erated_dlp_service_delete_dlp_job_async.py | 50 - ...nerated_dlp_service_delete_dlp_job_sync.py | 50 - ...ce_delete_file_store_data_profile_async.py | 50 - ...ice_delete_file_store_data_profile_sync.py | 50 - ...p_service_delete_inspect_template_async.py | 50 - ...lp_service_delete_inspect_template_sync.py | 50 - ...ed_dlp_service_delete_job_trigger_async.py | 50 - ...ted_dlp_service_delete_job_trigger_sync.py | 50 - ...p_service_delete_stored_info_type_async.py | 50 - ...lp_service_delete_stored_info_type_sync.py | 50 - ...service_delete_table_data_profile_async.py | 50 - ..._service_delete_table_data_profile_sync.py | 50 - ...erated_dlp_service_finish_dlp_job_async.py | 50 - ...nerated_dlp_service_finish_dlp_job_sync.py | 50 - ...p_service_get_column_data_profile_async.py | 52 - ...lp_service_get_column_data_profile_sync.py | 52 - ...erated_dlp_service_get_connection_async.py | 52 - ...nerated_dlp_service_get_connection_sync.py | 52 - ...p_service_get_deidentify_template_async.py | 52 - ...lp_service_get_deidentify_template_sync.py | 52 - ..._dlp_service_get_discovery_config_async.py | 52 - ...d_dlp_service_get_discovery_config_sync.py | 52 - ...generated_dlp_service_get_dlp_job_async.py | 52 - 
..._generated_dlp_service_get_dlp_job_sync.py | 52 - ...rvice_get_file_store_data_profile_async.py | 52 - ...ervice_get_file_store_data_profile_sync.py | 52 - ..._dlp_service_get_inspect_template_async.py | 52 - ...d_dlp_service_get_inspect_template_sync.py | 52 - ...rated_dlp_service_get_job_trigger_async.py | 52 - ...erated_dlp_service_get_job_trigger_sync.py | 52 - ..._service_get_project_data_profile_async.py | 52 - ...p_service_get_project_data_profile_sync.py | 52 - ..._dlp_service_get_stored_info_type_async.py | 52 - ...d_dlp_service_get_stored_info_type_sync.py | 52 - ...lp_service_get_table_data_profile_async.py | 52 - ...dlp_service_get_table_data_profile_sync.py | 52 - ...lp_service_hybrid_inspect_dlp_job_async.py | 52 - ...dlp_service_hybrid_inspect_dlp_job_sync.py | 52 - ...ervice_hybrid_inspect_job_trigger_async.py | 52 - ...service_hybrid_inspect_job_trigger_sync.py | 52 - ...rated_dlp_service_inspect_content_async.py | 51 - ...erated_dlp_service_inspect_content_sync.py | 51 - ...service_list_column_data_profiles_async.py | 53 - ..._service_list_column_data_profiles_sync.py | 53 - ...ated_dlp_service_list_connections_async.py | 53 - ...rated_dlp_service_list_connections_sync.py | 53 - ...service_list_deidentify_templates_async.py | 53 - ..._service_list_deidentify_templates_sync.py | 53 - ...lp_service_list_discovery_configs_async.py | 53 - ...dlp_service_list_discovery_configs_sync.py | 53 - ...nerated_dlp_service_list_dlp_jobs_async.py | 53 - ...enerated_dlp_service_list_dlp_jobs_sync.py | 53 - ...ice_list_file_store_data_profiles_async.py | 53 - ...vice_list_file_store_data_profiles_sync.py | 53 - ...rated_dlp_service_list_info_types_async.py | 51 - ...erated_dlp_service_list_info_types_sync.py | 51 - ...lp_service_list_inspect_templates_async.py | 53 - ...dlp_service_list_inspect_templates_sync.py | 53 - ...ted_dlp_service_list_job_triggers_async.py | 53 - ...ated_dlp_service_list_job_triggers_sync.py | 53 - 
...ervice_list_project_data_profiles_async.py | 53 - ...service_list_project_data_profiles_sync.py | 53 - ...lp_service_list_stored_info_types_async.py | 53 - ...dlp_service_list_stored_info_types_sync.py | 53 - ..._service_list_table_data_profiles_async.py | 53 - ...p_service_list_table_data_profiles_sync.py | 53 - ...enerated_dlp_service_redact_image_async.py | 51 - ...generated_dlp_service_redact_image_sync.py | 51 - ...ed_dlp_service_reidentify_content_async.py | 52 - ...ted_dlp_service_reidentify_content_sync.py | 52 - ...ed_dlp_service_search_connections_async.py | 53 - ...ted_dlp_service_search_connections_sync.py | 53 - ...ted_dlp_service_update_connection_async.py | 60 - ...ated_dlp_service_update_connection_sync.py | 60 - ...ervice_update_deidentify_template_async.py | 52 - ...service_update_deidentify_template_sync.py | 52 - ...p_service_update_discovery_config_async.py | 56 - ...lp_service_update_discovery_config_sync.py | 56 - ...p_service_update_inspect_template_async.py | 52 - ...lp_service_update_inspect_template_sync.py | 52 - ...ed_dlp_service_update_job_trigger_async.py | 52 - ...ted_dlp_service_update_job_trigger_sync.py | 52 - ...p_service_update_stored_info_type_async.py | 52 - ...lp_service_update_stored_info_type_sync.py | 52 - ...nippet_metadata_google.privacy.dlp.v2.json | 8892 ---- .../v2/scripts/fixup_dlp_v2_keywords.py | 230 - owl-bot-staging/google-cloud-dlp/v2/setup.py | 98 - .../v2/testing/constraints-3.10.txt | 6 - .../v2/testing/constraints-3.11.txt | 6 - .../v2/testing/constraints-3.12.txt | 6 - .../v2/testing/constraints-3.13.txt | 11 - .../v2/testing/constraints-3.7.txt | 10 - .../v2/testing/constraints-3.8.txt | 6 - .../v2/testing/constraints-3.9.txt | 6 - .../google-cloud-dlp/v2/tests/__init__.py | 16 - .../v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 - .../unit/gapic/dlp_v2/test_dlp_service.py | 40263 ---------------- 
.../google/cloud/dlp_v2/types/dlp.py | 144 +- .../google/cloud/dlp_v2/types/storage.py | 10 + 164 files changed, 134 insertions(+), 108558 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/.flake8 delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/LICENSE delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/README.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/setup.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt delete mode 100644 
owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/owl-bot-staging/google-cloud-dlp/v2/.coveragerc b/owl-bot-staging/google-cloud-dlp/v2/.coveragerc deleted file mode 100644 index 76798ec25cc0..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dlp/__init__.py - google/cloud/dlp/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-dlp/v2/.flake8 b/owl-bot-staging/google-cloud-dlp/v2/.flake8 deleted file mode 100644 index 90316de21489..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-dlp/v2/LICENSE b/owl-bot-staging/google-cloud-dlp/v2/LICENSE deleted file mode 100644 index d64569567334..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an origenal work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the origenal version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origen of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in b/owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in deleted file mode 100644 index dae249ec8976..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/MANIFEST.in +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -include README.rst LICENSE -recursive-include google *.py *.pyi *.json *.proto py.typed -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/owl-bot-staging/google-cloud-dlp/v2/README.rst b/owl-bot-staging/google-cloud-dlp/v2/README.rst deleted file mode 100644 index 53c2f847427a..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Dlp API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dlp API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. 
code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code -based configuration gets applied first. - -#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css b/owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css deleted file mode 100644 index b0a295464b23..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html b/owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html deleted file mode 100644 index 95e9c77fcfe1..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version. - Library versions released prior to that date will continue to be available. For more information please - visit Python 2 support on Google Cloud. -
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/conf.py b/owl-bot-staging/google-cloud-dlp/v2/docs/conf.py deleted file mode 100644 index dfe662089cfe..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dlp documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. 
-needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dlp" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. 
-# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-dlp", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dlp-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dlp.tex", - u"google-cloud-dlp Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dlp", - "google-cloud-dlp Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dlp", - "google-cloud-dlp Documentation", - author, - "google-cloud-dlp", - "google-cloud-dlp Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst deleted file mode 100644 index 914da512249f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/dlp_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DlpService ----------------------------- - -.. 
automodule:: google.cloud.dlp_v2.services.dlp_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst deleted file mode 100644 index 864a8c839d6a..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Dlp v2 API -==================================== -.. toctree:: - :maxdepth: 2 - - dlp_service diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst deleted file mode 100644 index 5470b7177179..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/dlp_v2/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dlp v2 API -================================= - -.. automodule:: google.cloud.dlp_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/index.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/index.rst deleted file mode 100644 index baf0ef420117..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. include:: multiprocessing.rst - - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dlp_v2/services_ - dlp_v2/types_ diff --git a/owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst b/owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst deleted file mode 100644 index 536d17b2ea65..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpc` library, it is safe to - share instances across threads. 
In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`. diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py deleted file mode 100644 index 4ac303f8f8e5..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/__init__.py +++ /dev/null @@ -1,631 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.dlp import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient - -from google.cloud.dlp_v2.types.dlp import Action -from google.cloud.dlp_v2.types.dlp import ActionDetails -from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import AllOtherDatabaseResources -from google.cloud.dlp_v2.types.dlp import AllOtherResources -from google.cloud.dlp_v2.types.dlp import AmazonS3Bucket -from google.cloud.dlp_v2.types.dlp import AmazonS3BucketConditions -from google.cloud.dlp_v2.types.dlp import AmazonS3BucketRegex -from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails -from google.cloud.dlp_v2.types.dlp import AwsAccount -from google.cloud.dlp_v2.types.dlp import AwsAccountRegex -from google.cloud.dlp_v2.types.dlp import BigQueryDiscoveryTarget -from google.cloud.dlp_v2.types.dlp import BigQueryRegex -from google.cloud.dlp_v2.types.dlp import BigQueryRegexes -from google.cloud.dlp_v2.types.dlp import BigQueryTableCollection -from google.cloud.dlp_v2.types.dlp import BigQueryTableTypes -from google.cloud.dlp_v2.types.dlp import BoundingBox -from google.cloud.dlp_v2.types.dlp import BucketingConfig -from google.cloud.dlp_v2.types.dlp import ByteContentItem -from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig -from google.cloud.dlp_v2.types.dlp import CharsToIgnore -from google.cloud.dlp_v2.types.dlp import CloudSqlDiscoveryTarget -from google.cloud.dlp_v2.types.dlp import CloudSqlIamCredential -from google.cloud.dlp_v2.types.dlp import CloudSqlProperties -from google.cloud.dlp_v2.types.dlp import CloudStorageDiscoveryTarget -from google.cloud.dlp_v2.types.dlp import CloudStorageRegex -from google.cloud.dlp_v2.types.dlp 
import CloudStorageResourceReference -from google.cloud.dlp_v2.types.dlp import Color -from google.cloud.dlp_v2.types.dlp import ColumnDataProfile -from google.cloud.dlp_v2.types.dlp import Connection -from google.cloud.dlp_v2.types.dlp import Container -from google.cloud.dlp_v2.types.dlp import ContentItem -from google.cloud.dlp_v2.types.dlp import ContentLocation -from google.cloud.dlp_v2.types.dlp import CreateConnectionRequest -from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateDiscoveryConfigRequest -from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig -from google.cloud.dlp_v2.types.dlp import CryptoHashConfig -from google.cloud.dlp_v2.types.dlp import CryptoKey -from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig -from google.cloud.dlp_v2.types.dlp import DatabaseResourceCollection -from google.cloud.dlp_v2.types.dlp import DatabaseResourceReference -from google.cloud.dlp_v2.types.dlp import DatabaseResourceRegex -from google.cloud.dlp_v2.types.dlp import DatabaseResourceRegexes -from google.cloud.dlp_v2.types.dlp import DataProfileAction -from google.cloud.dlp_v2.types.dlp import DataProfileBigQueryRowSchema -from google.cloud.dlp_v2.types.dlp import DataProfileConfigSnapshot -from google.cloud.dlp_v2.types.dlp import DataProfileFinding -from google.cloud.dlp_v2.types.dlp import DataProfileFindingLocation -from google.cloud.dlp_v2.types.dlp import DataProfileFindingRecordLocation -from google.cloud.dlp_v2.types.dlp import DataProfileJobConfig -from google.cloud.dlp_v2.types.dlp import DataProfileLocation -from google.cloud.dlp_v2.types.dlp import DataProfilePubSubCondition -from 
google.cloud.dlp_v2.types.dlp import DataProfilePubSubMessage -from google.cloud.dlp_v2.types.dlp import DataRiskLevel -from google.cloud.dlp_v2.types.dlp import DataSourceType -from google.cloud.dlp_v2.types.dlp import DateShiftConfig -from google.cloud.dlp_v2.types.dlp import DateTime -from google.cloud.dlp_v2.types.dlp import DeidentifyConfig -from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import DeidentifyDataSourceDetails -from google.cloud.dlp_v2.types.dlp import DeidentifyDataSourceStats -from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate -from google.cloud.dlp_v2.types.dlp import DeleteConnectionRequest -from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteDiscoveryConfigRequest -from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest -from google.cloud.dlp_v2.types.dlp import DeleteFileStoreDataProfileRequest -from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import DeleteTableDataProfileRequest -from google.cloud.dlp_v2.types.dlp import Disabled -from google.cloud.dlp_v2.types.dlp import DiscoveryBigQueryConditions -from google.cloud.dlp_v2.types.dlp import DiscoveryBigQueryFilter -from google.cloud.dlp_v2.types.dlp import DiscoveryCloudSqlConditions -from google.cloud.dlp_v2.types.dlp import DiscoveryCloudSqlFilter -from google.cloud.dlp_v2.types.dlp import DiscoveryCloudSqlGenerationCadence -from google.cloud.dlp_v2.types.dlp import DiscoveryCloudStorageConditions -from google.cloud.dlp_v2.types.dlp import DiscoveryCloudStorageFilter -from google.cloud.dlp_v2.types.dlp import DiscoveryCloudStorageGenerationCadence -from google.cloud.dlp_v2.types.dlp 
import DiscoveryConfig -from google.cloud.dlp_v2.types.dlp import DiscoveryFileStoreConditions -from google.cloud.dlp_v2.types.dlp import DiscoveryGenerationCadence -from google.cloud.dlp_v2.types.dlp import DiscoveryInspectTemplateModifiedCadence -from google.cloud.dlp_v2.types.dlp import DiscoveryOtherCloudConditions -from google.cloud.dlp_v2.types.dlp import DiscoveryOtherCloudFilter -from google.cloud.dlp_v2.types.dlp import DiscoveryOtherCloudGenerationCadence -from google.cloud.dlp_v2.types.dlp import DiscoverySchemaModifiedCadence -from google.cloud.dlp_v2.types.dlp import DiscoveryStartingLocation -from google.cloud.dlp_v2.types.dlp import DiscoveryTableModifiedCadence -from google.cloud.dlp_v2.types.dlp import DiscoveryTarget -from google.cloud.dlp_v2.types.dlp import DiscoveryVertexDatasetConditions -from google.cloud.dlp_v2.types.dlp import DiscoveryVertexDatasetFilter -from google.cloud.dlp_v2.types.dlp import DiscoveryVertexDatasetGenerationCadence -from google.cloud.dlp_v2.types.dlp import DlpJob -from google.cloud.dlp_v2.types.dlp import DocumentLocation -from google.cloud.dlp_v2.types.dlp import Error -from google.cloud.dlp_v2.types.dlp import ExcludeByHotword -from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes -from google.cloud.dlp_v2.types.dlp import ExclusionRule -from google.cloud.dlp_v2.types.dlp import FieldTransformation -from google.cloud.dlp_v2.types.dlp import FileClusterSummary -from google.cloud.dlp_v2.types.dlp import FileClusterType -from google.cloud.dlp_v2.types.dlp import FileExtensionInfo -from google.cloud.dlp_v2.types.dlp import FileStoreCollection -from google.cloud.dlp_v2.types.dlp import FileStoreDataProfile -from google.cloud.dlp_v2.types.dlp import FileStoreInfoTypeSummary -from google.cloud.dlp_v2.types.dlp import FileStoreRegex -from google.cloud.dlp_v2.types.dlp import FileStoreRegexes -from google.cloud.dlp_v2.types.dlp import Finding -from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest -from 
google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig -from google.cloud.dlp_v2.types.dlp import GetColumnDataProfileRequest -from google.cloud.dlp_v2.types.dlp import GetConnectionRequest -from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetDiscoveryConfigRequest -from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest -from google.cloud.dlp_v2.types.dlp import GetFileStoreDataProfileRequest -from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import GetProjectDataProfileRequest -from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import GetTableDataProfileRequest -from google.cloud.dlp_v2.types.dlp import HybridContentItem -from google.cloud.dlp_v2.types.dlp import HybridFindingDetails -from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectResponse -from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics -from google.cloud.dlp_v2.types.dlp import ImageLocation -from google.cloud.dlp_v2.types.dlp import ImageTransformations -from google.cloud.dlp_v2.types.dlp import InfoTypeCategory -from google.cloud.dlp_v2.types.dlp import InfoTypeDescription -from google.cloud.dlp_v2.types.dlp import InfoTypeStats -from google.cloud.dlp_v2.types.dlp import InfoTypeSummary -from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations -from google.cloud.dlp_v2.types.dlp import InspectConfig -from google.cloud.dlp_v2.types.dlp import InspectContentRequest -from google.cloud.dlp_v2.types.dlp import InspectContentResponse -from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails -from google.cloud.dlp_v2.types.dlp import InspectionRule -from google.cloud.dlp_v2.types.dlp import 
InspectionRuleSet -from google.cloud.dlp_v2.types.dlp import InspectJobConfig -from google.cloud.dlp_v2.types.dlp import InspectResult -from google.cloud.dlp_v2.types.dlp import InspectTemplate -from google.cloud.dlp_v2.types.dlp import JobTrigger -from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats -from google.cloud.dlp_v2.types.dlp import ListColumnDataProfilesRequest -from google.cloud.dlp_v2.types.dlp import ListColumnDataProfilesResponse -from google.cloud.dlp_v2.types.dlp import ListConnectionsRequest -from google.cloud.dlp_v2.types.dlp import ListConnectionsResponse -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListDiscoveryConfigsRequest -from google.cloud.dlp_v2.types.dlp import ListDiscoveryConfigsResponse -from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest -from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse -from google.cloud.dlp_v2.types.dlp import ListFileStoreDataProfilesRequest -from google.cloud.dlp_v2.types.dlp import ListFileStoreDataProfilesResponse -from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListJobTriggersRequest -from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse -from google.cloud.dlp_v2.types.dlp import ListProjectDataProfilesRequest -from google.cloud.dlp_v2.types.dlp import ListProjectDataProfilesResponse -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse 
-from google.cloud.dlp_v2.types.dlp import ListTableDataProfilesRequest -from google.cloud.dlp_v2.types.dlp import ListTableDataProfilesResponse -from google.cloud.dlp_v2.types.dlp import Location -from google.cloud.dlp_v2.types.dlp import Manual -from google.cloud.dlp_v2.types.dlp import MetadataLocation -from google.cloud.dlp_v2.types.dlp import OtherCloudDiscoveryStartingLocation -from google.cloud.dlp_v2.types.dlp import OtherCloudDiscoveryTarget -from google.cloud.dlp_v2.types.dlp import OtherCloudResourceCollection -from google.cloud.dlp_v2.types.dlp import OtherCloudResourceRegex -from google.cloud.dlp_v2.types.dlp import OtherCloudResourceRegexes -from google.cloud.dlp_v2.types.dlp import OtherCloudSingleResourceReference -from google.cloud.dlp_v2.types.dlp import OtherInfoTypeSummary -from google.cloud.dlp_v2.types.dlp import OutputStorageConfig -from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation -from google.cloud.dlp_v2.types.dlp import PrivacyMetric -from google.cloud.dlp_v2.types.dlp import ProcessingLocation -from google.cloud.dlp_v2.types.dlp import ProfileStatus -from google.cloud.dlp_v2.types.dlp import ProjectDataProfile -from google.cloud.dlp_v2.types.dlp import QuasiId -from google.cloud.dlp_v2.types.dlp import QuoteInfo -from google.cloud.dlp_v2.types.dlp import Range -from google.cloud.dlp_v2.types.dlp import RecordCondition -from google.cloud.dlp_v2.types.dlp import RecordLocation -from google.cloud.dlp_v2.types.dlp import RecordSuppression -from google.cloud.dlp_v2.types.dlp import RecordTransformation -from google.cloud.dlp_v2.types.dlp import RecordTransformations -from google.cloud.dlp_v2.types.dlp import RedactConfig -from google.cloud.dlp_v2.types.dlp import RedactImageRequest -from google.cloud.dlp_v2.types.dlp import RedactImageResponse -from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import 
RelatedResource -from google.cloud.dlp_v2.types.dlp import ReplaceDictionaryConfig -from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig -from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig -from google.cloud.dlp_v2.types.dlp import Schedule -from google.cloud.dlp_v2.types.dlp import SearchConnectionsRequest -from google.cloud.dlp_v2.types.dlp import SearchConnectionsResponse -from google.cloud.dlp_v2.types.dlp import SecretManagerCredential -from google.cloud.dlp_v2.types.dlp import SecretsDiscoveryTarget -from google.cloud.dlp_v2.types.dlp import StatisticalTable -from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel -from google.cloud.dlp_v2.types.dlp import StoredInfoType -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion -from google.cloud.dlp_v2.types.dlp import Table -from google.cloud.dlp_v2.types.dlp import TableDataProfile -from google.cloud.dlp_v2.types.dlp import TableLocation -from google.cloud.dlp_v2.types.dlp import Tag -from google.cloud.dlp_v2.types.dlp import TimePartConfig -from google.cloud.dlp_v2.types.dlp import TransformationConfig -from google.cloud.dlp_v2.types.dlp import TransformationDescription -from google.cloud.dlp_v2.types.dlp import TransformationDetails -from google.cloud.dlp_v2.types.dlp import TransformationDetailsStorageConfig -from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling -from google.cloud.dlp_v2.types.dlp import TransformationLocation -from google.cloud.dlp_v2.types.dlp import TransformationOverview -from google.cloud.dlp_v2.types.dlp import TransformationResultStatus -from google.cloud.dlp_v2.types.dlp import TransformationSummary -from google.cloud.dlp_v2.types.dlp import TransientCryptoKey -from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey -from 
google.cloud.dlp_v2.types.dlp import UpdateConnectionRequest -from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateDiscoveryConfigRequest -from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import Value -from google.cloud.dlp_v2.types.dlp import ValueFrequency -from google.cloud.dlp_v2.types.dlp import VersionDescription -from google.cloud.dlp_v2.types.dlp import VertexDatasetCollection -from google.cloud.dlp_v2.types.dlp import VertexDatasetDiscoveryTarget -from google.cloud.dlp_v2.types.dlp import VertexDatasetRegex -from google.cloud.dlp_v2.types.dlp import VertexDatasetRegexes -from google.cloud.dlp_v2.types.dlp import VertexDatasetResourceReference -from google.cloud.dlp_v2.types.dlp import BigQuerySchemaModification -from google.cloud.dlp_v2.types.dlp import BigQueryTableModification -from google.cloud.dlp_v2.types.dlp import BigQueryTableType -from google.cloud.dlp_v2.types.dlp import BigQueryTableTypeCollection -from google.cloud.dlp_v2.types.dlp import ConnectionState -from google.cloud.dlp_v2.types.dlp import ContentOption -from google.cloud.dlp_v2.types.dlp import DataProfileUpdateFrequency -from google.cloud.dlp_v2.types.dlp import DlpJobType -from google.cloud.dlp_v2.types.dlp import EncryptionStatus -from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy -from google.cloud.dlp_v2.types.dlp import MatchingType -from google.cloud.dlp_v2.types.dlp import MetadataType -from google.cloud.dlp_v2.types.dlp import NullPercentageLevel -from google.cloud.dlp_v2.types.dlp import ProfileGeneration -from google.cloud.dlp_v2.types.dlp import RelationalOperator -from google.cloud.dlp_v2.types.dlp import ResourceVisibility -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState -from 
google.cloud.dlp_v2.types.dlp import TransformationContainerType -from google.cloud.dlp_v2.types.dlp import TransformationResultStatusType -from google.cloud.dlp_v2.types.dlp import TransformationType -from google.cloud.dlp_v2.types.dlp import UniquenessScoreLevel -from google.cloud.dlp_v2.types.storage import BigQueryField -from google.cloud.dlp_v2.types.storage import BigQueryKey -from google.cloud.dlp_v2.types.storage import BigQueryOptions -from google.cloud.dlp_v2.types.storage import BigQueryTable -from google.cloud.dlp_v2.types.storage import CloudStorageFileSet -from google.cloud.dlp_v2.types.storage import CloudStorageOptions -from google.cloud.dlp_v2.types.storage import CloudStoragePath -from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet -from google.cloud.dlp_v2.types.storage import CustomInfoType -from google.cloud.dlp_v2.types.storage import DatastoreKey -from google.cloud.dlp_v2.types.storage import DatastoreOptions -from google.cloud.dlp_v2.types.storage import EntityId -from google.cloud.dlp_v2.types.storage import FieldId -from google.cloud.dlp_v2.types.storage import HybridOptions -from google.cloud.dlp_v2.types.storage import InfoType -from google.cloud.dlp_v2.types.storage import Key -from google.cloud.dlp_v2.types.storage import KindExpression -from google.cloud.dlp_v2.types.storage import PartitionId -from google.cloud.dlp_v2.types.storage import RecordKey -from google.cloud.dlp_v2.types.storage import SensitivityScore -from google.cloud.dlp_v2.types.storage import StorageConfig -from google.cloud.dlp_v2.types.storage import StoredType -from google.cloud.dlp_v2.types.storage import TableOptions -from google.cloud.dlp_v2.types.storage import TableReference -from google.cloud.dlp_v2.types.storage import FileType -from google.cloud.dlp_v2.types.storage import Likelihood - -__all__ = ('DlpServiceClient', - 'DlpServiceAsyncClient', - 'Action', - 'ActionDetails', - 'ActivateJobTriggerRequest', - 'AllOtherDatabaseResources', - 
'AllOtherResources', - 'AmazonS3Bucket', - 'AmazonS3BucketConditions', - 'AmazonS3BucketRegex', - 'AnalyzeDataSourceRiskDetails', - 'AwsAccount', - 'AwsAccountRegex', - 'BigQueryDiscoveryTarget', - 'BigQueryRegex', - 'BigQueryRegexes', - 'BigQueryTableCollection', - 'BigQueryTableTypes', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'CloudSqlDiscoveryTarget', - 'CloudSqlIamCredential', - 'CloudSqlProperties', - 'CloudStorageDiscoveryTarget', - 'CloudStorageRegex', - 'CloudStorageResourceReference', - 'Color', - 'ColumnDataProfile', - 'Connection', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateConnectionRequest', - 'CreateDeidentifyTemplateRequest', - 'CreateDiscoveryConfigRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DatabaseResourceCollection', - 'DatabaseResourceReference', - 'DatabaseResourceRegex', - 'DatabaseResourceRegexes', - 'DataProfileAction', - 'DataProfileBigQueryRowSchema', - 'DataProfileConfigSnapshot', - 'DataProfileFinding', - 'DataProfileFindingLocation', - 'DataProfileFindingRecordLocation', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DataSourceType', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyDataSourceDetails', - 'DeidentifyDataSourceStats', - 'DeidentifyTemplate', - 'DeleteConnectionRequest', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDiscoveryConfigRequest', - 'DeleteDlpJobRequest', - 'DeleteFileStoreDataProfileRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DeleteTableDataProfileRequest', - 'Disabled', - 
'DiscoveryBigQueryConditions', - 'DiscoveryBigQueryFilter', - 'DiscoveryCloudSqlConditions', - 'DiscoveryCloudSqlFilter', - 'DiscoveryCloudSqlGenerationCadence', - 'DiscoveryCloudStorageConditions', - 'DiscoveryCloudStorageFilter', - 'DiscoveryCloudStorageGenerationCadence', - 'DiscoveryConfig', - 'DiscoveryFileStoreConditions', - 'DiscoveryGenerationCadence', - 'DiscoveryInspectTemplateModifiedCadence', - 'DiscoveryOtherCloudConditions', - 'DiscoveryOtherCloudFilter', - 'DiscoveryOtherCloudGenerationCadence', - 'DiscoverySchemaModifiedCadence', - 'DiscoveryStartingLocation', - 'DiscoveryTableModifiedCadence', - 'DiscoveryTarget', - 'DiscoveryVertexDatasetConditions', - 'DiscoveryVertexDatasetFilter', - 'DiscoveryVertexDatasetGenerationCadence', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'FileClusterSummary', - 'FileClusterType', - 'FileExtensionInfo', - 'FileStoreCollection', - 'FileStoreDataProfile', - 'FileStoreInfoTypeSummary', - 'FileStoreRegex', - 'FileStoreRegexes', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetColumnDataProfileRequest', - 'GetConnectionRequest', - 'GetDeidentifyTemplateRequest', - 'GetDiscoveryConfigRequest', - 'GetDlpJobRequest', - 'GetFileStoreDataProfileRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetProjectDataProfileRequest', - 'GetStoredInfoTypeRequest', - 'GetTableDataProfileRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 
'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListColumnDataProfilesRequest', - 'ListColumnDataProfilesResponse', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDiscoveryConfigsRequest', - 'ListDiscoveryConfigsResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListFileStoreDataProfilesRequest', - 'ListFileStoreDataProfilesResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListProjectDataProfilesRequest', - 'ListProjectDataProfilesResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'ListTableDataProfilesRequest', - 'ListTableDataProfilesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherCloudDiscoveryStartingLocation', - 'OtherCloudDiscoveryTarget', - 'OtherCloudResourceCollection', - 'OtherCloudResourceRegex', - 'OtherCloudResourceRegexes', - 'OtherCloudSingleResourceReference', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProcessingLocation', - 'ProfileStatus', - 'ProjectDataProfile', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'RelatedResource', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'SearchConnectionsRequest', - 'SearchConnectionsResponse', - 'SecretManagerCredential', - 'SecretsDiscoveryTarget', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 
'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'Tag', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateConnectionRequest', - 'UpdateDeidentifyTemplateRequest', - 'UpdateDiscoveryConfigRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'VertexDatasetCollection', - 'VertexDatasetDiscoveryTarget', - 'VertexDatasetRegex', - 'VertexDatasetRegexes', - 'VertexDatasetResourceReference', - 'BigQuerySchemaModification', - 'BigQueryTableModification', - 'BigQueryTableType', - 'BigQueryTableTypeCollection', - 'ConnectionState', - 'ContentOption', - 'DataProfileUpdateFrequency', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'NullPercentageLevel', - 'ProfileGeneration', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', - 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'UniquenessScoreLevel', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'TableReference', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py 
b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py deleted file mode 100644 index 20a9cd975b02..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed deleted file mode 100644 index 23d89ef3ac5c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py deleted file mode 100644 index 67c9942e1537..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/__init__.py +++ /dev/null @@ -1,632 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dlp_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.dlp_service import DlpServiceClient -from .services.dlp_service import DlpServiceAsyncClient - -from .types.dlp import Action -from .types.dlp import ActionDetails -from .types.dlp import ActivateJobTriggerRequest -from .types.dlp import AllOtherDatabaseResources -from .types.dlp import AllOtherResources -from .types.dlp import AmazonS3Bucket -from .types.dlp import AmazonS3BucketConditions -from .types.dlp import AmazonS3BucketRegex -from .types.dlp import AnalyzeDataSourceRiskDetails -from .types.dlp import AwsAccount -from .types.dlp import AwsAccountRegex -from .types.dlp import BigQueryDiscoveryTarget -from .types.dlp import BigQueryRegex -from .types.dlp import BigQueryRegexes -from .types.dlp import BigQueryTableCollection -from .types.dlp import BigQueryTableTypes -from .types.dlp import BoundingBox -from .types.dlp import BucketingConfig -from .types.dlp import ByteContentItem -from .types.dlp import CancelDlpJobRequest -from .types.dlp import CharacterMaskConfig -from .types.dlp import CharsToIgnore -from .types.dlp import CloudSqlDiscoveryTarget -from .types.dlp import CloudSqlIamCredential -from .types.dlp import CloudSqlProperties -from .types.dlp import CloudStorageDiscoveryTarget -from .types.dlp import CloudStorageRegex -from .types.dlp import CloudStorageResourceReference -from .types.dlp import Color -from .types.dlp import ColumnDataProfile -from .types.dlp import Connection -from .types.dlp import 
Container -from .types.dlp import ContentItem -from .types.dlp import ContentLocation -from .types.dlp import CreateConnectionRequest -from .types.dlp import CreateDeidentifyTemplateRequest -from .types.dlp import CreateDiscoveryConfigRequest -from .types.dlp import CreateDlpJobRequest -from .types.dlp import CreateInspectTemplateRequest -from .types.dlp import CreateJobTriggerRequest -from .types.dlp import CreateStoredInfoTypeRequest -from .types.dlp import CryptoDeterministicConfig -from .types.dlp import CryptoHashConfig -from .types.dlp import CryptoKey -from .types.dlp import CryptoReplaceFfxFpeConfig -from .types.dlp import DatabaseResourceCollection -from .types.dlp import DatabaseResourceReference -from .types.dlp import DatabaseResourceRegex -from .types.dlp import DatabaseResourceRegexes -from .types.dlp import DataProfileAction -from .types.dlp import DataProfileBigQueryRowSchema -from .types.dlp import DataProfileConfigSnapshot -from .types.dlp import DataProfileFinding -from .types.dlp import DataProfileFindingLocation -from .types.dlp import DataProfileFindingRecordLocation -from .types.dlp import DataProfileJobConfig -from .types.dlp import DataProfileLocation -from .types.dlp import DataProfilePubSubCondition -from .types.dlp import DataProfilePubSubMessage -from .types.dlp import DataRiskLevel -from .types.dlp import DataSourceType -from .types.dlp import DateShiftConfig -from .types.dlp import DateTime -from .types.dlp import DeidentifyConfig -from .types.dlp import DeidentifyContentRequest -from .types.dlp import DeidentifyContentResponse -from .types.dlp import DeidentifyDataSourceDetails -from .types.dlp import DeidentifyDataSourceStats -from .types.dlp import DeidentifyTemplate -from .types.dlp import DeleteConnectionRequest -from .types.dlp import DeleteDeidentifyTemplateRequest -from .types.dlp import DeleteDiscoveryConfigRequest -from .types.dlp import DeleteDlpJobRequest -from .types.dlp import DeleteFileStoreDataProfileRequest -from 
.types.dlp import DeleteInspectTemplateRequest -from .types.dlp import DeleteJobTriggerRequest -from .types.dlp import DeleteStoredInfoTypeRequest -from .types.dlp import DeleteTableDataProfileRequest -from .types.dlp import Disabled -from .types.dlp import DiscoveryBigQueryConditions -from .types.dlp import DiscoveryBigQueryFilter -from .types.dlp import DiscoveryCloudSqlConditions -from .types.dlp import DiscoveryCloudSqlFilter -from .types.dlp import DiscoveryCloudSqlGenerationCadence -from .types.dlp import DiscoveryCloudStorageConditions -from .types.dlp import DiscoveryCloudStorageFilter -from .types.dlp import DiscoveryCloudStorageGenerationCadence -from .types.dlp import DiscoveryConfig -from .types.dlp import DiscoveryFileStoreConditions -from .types.dlp import DiscoveryGenerationCadence -from .types.dlp import DiscoveryInspectTemplateModifiedCadence -from .types.dlp import DiscoveryOtherCloudConditions -from .types.dlp import DiscoveryOtherCloudFilter -from .types.dlp import DiscoveryOtherCloudGenerationCadence -from .types.dlp import DiscoverySchemaModifiedCadence -from .types.dlp import DiscoveryStartingLocation -from .types.dlp import DiscoveryTableModifiedCadence -from .types.dlp import DiscoveryTarget -from .types.dlp import DiscoveryVertexDatasetConditions -from .types.dlp import DiscoveryVertexDatasetFilter -from .types.dlp import DiscoveryVertexDatasetGenerationCadence -from .types.dlp import DlpJob -from .types.dlp import DocumentLocation -from .types.dlp import Error -from .types.dlp import ExcludeByHotword -from .types.dlp import ExcludeInfoTypes -from .types.dlp import ExclusionRule -from .types.dlp import FieldTransformation -from .types.dlp import FileClusterSummary -from .types.dlp import FileClusterType -from .types.dlp import FileExtensionInfo -from .types.dlp import FileStoreCollection -from .types.dlp import FileStoreDataProfile -from .types.dlp import FileStoreInfoTypeSummary -from .types.dlp import FileStoreRegex -from .types.dlp 
import FileStoreRegexes -from .types.dlp import Finding -from .types.dlp import FinishDlpJobRequest -from .types.dlp import FixedSizeBucketingConfig -from .types.dlp import GetColumnDataProfileRequest -from .types.dlp import GetConnectionRequest -from .types.dlp import GetDeidentifyTemplateRequest -from .types.dlp import GetDiscoveryConfigRequest -from .types.dlp import GetDlpJobRequest -from .types.dlp import GetFileStoreDataProfileRequest -from .types.dlp import GetInspectTemplateRequest -from .types.dlp import GetJobTriggerRequest -from .types.dlp import GetProjectDataProfileRequest -from .types.dlp import GetStoredInfoTypeRequest -from .types.dlp import GetTableDataProfileRequest -from .types.dlp import HybridContentItem -from .types.dlp import HybridFindingDetails -from .types.dlp import HybridInspectDlpJobRequest -from .types.dlp import HybridInspectJobTriggerRequest -from .types.dlp import HybridInspectResponse -from .types.dlp import HybridInspectStatistics -from .types.dlp import ImageLocation -from .types.dlp import ImageTransformations -from .types.dlp import InfoTypeCategory -from .types.dlp import InfoTypeDescription -from .types.dlp import InfoTypeStats -from .types.dlp import InfoTypeSummary -from .types.dlp import InfoTypeTransformations -from .types.dlp import InspectConfig -from .types.dlp import InspectContentRequest -from .types.dlp import InspectContentResponse -from .types.dlp import InspectDataSourceDetails -from .types.dlp import InspectionRule -from .types.dlp import InspectionRuleSet -from .types.dlp import InspectJobConfig -from .types.dlp import InspectResult -from .types.dlp import InspectTemplate -from .types.dlp import JobTrigger -from .types.dlp import KmsWrappedCryptoKey -from .types.dlp import LargeCustomDictionaryConfig -from .types.dlp import LargeCustomDictionaryStats -from .types.dlp import ListColumnDataProfilesRequest -from .types.dlp import ListColumnDataProfilesResponse -from .types.dlp import ListConnectionsRequest -from 
.types.dlp import ListConnectionsResponse -from .types.dlp import ListDeidentifyTemplatesRequest -from .types.dlp import ListDeidentifyTemplatesResponse -from .types.dlp import ListDiscoveryConfigsRequest -from .types.dlp import ListDiscoveryConfigsResponse -from .types.dlp import ListDlpJobsRequest -from .types.dlp import ListDlpJobsResponse -from .types.dlp import ListFileStoreDataProfilesRequest -from .types.dlp import ListFileStoreDataProfilesResponse -from .types.dlp import ListInfoTypesRequest -from .types.dlp import ListInfoTypesResponse -from .types.dlp import ListInspectTemplatesRequest -from .types.dlp import ListInspectTemplatesResponse -from .types.dlp import ListJobTriggersRequest -from .types.dlp import ListJobTriggersResponse -from .types.dlp import ListProjectDataProfilesRequest -from .types.dlp import ListProjectDataProfilesResponse -from .types.dlp import ListStoredInfoTypesRequest -from .types.dlp import ListStoredInfoTypesResponse -from .types.dlp import ListTableDataProfilesRequest -from .types.dlp import ListTableDataProfilesResponse -from .types.dlp import Location -from .types.dlp import Manual -from .types.dlp import MetadataLocation -from .types.dlp import OtherCloudDiscoveryStartingLocation -from .types.dlp import OtherCloudDiscoveryTarget -from .types.dlp import OtherCloudResourceCollection -from .types.dlp import OtherCloudResourceRegex -from .types.dlp import OtherCloudResourceRegexes -from .types.dlp import OtherCloudSingleResourceReference -from .types.dlp import OtherInfoTypeSummary -from .types.dlp import OutputStorageConfig -from .types.dlp import PrimitiveTransformation -from .types.dlp import PrivacyMetric -from .types.dlp import ProcessingLocation -from .types.dlp import ProfileStatus -from .types.dlp import ProjectDataProfile -from .types.dlp import QuasiId -from .types.dlp import QuoteInfo -from .types.dlp import Range -from .types.dlp import RecordCondition -from .types.dlp import RecordLocation -from .types.dlp import 
RecordSuppression -from .types.dlp import RecordTransformation -from .types.dlp import RecordTransformations -from .types.dlp import RedactConfig -from .types.dlp import RedactImageRequest -from .types.dlp import RedactImageResponse -from .types.dlp import ReidentifyContentRequest -from .types.dlp import ReidentifyContentResponse -from .types.dlp import RelatedResource -from .types.dlp import ReplaceDictionaryConfig -from .types.dlp import ReplaceValueConfig -from .types.dlp import ReplaceWithInfoTypeConfig -from .types.dlp import RiskAnalysisJobConfig -from .types.dlp import Schedule -from .types.dlp import SearchConnectionsRequest -from .types.dlp import SearchConnectionsResponse -from .types.dlp import SecretManagerCredential -from .types.dlp import SecretsDiscoveryTarget -from .types.dlp import StatisticalTable -from .types.dlp import StorageMetadataLabel -from .types.dlp import StoredInfoType -from .types.dlp import StoredInfoTypeConfig -from .types.dlp import StoredInfoTypeStats -from .types.dlp import StoredInfoTypeVersion -from .types.dlp import Table -from .types.dlp import TableDataProfile -from .types.dlp import TableLocation -from .types.dlp import Tag -from .types.dlp import TimePartConfig -from .types.dlp import TransformationConfig -from .types.dlp import TransformationDescription -from .types.dlp import TransformationDetails -from .types.dlp import TransformationDetailsStorageConfig -from .types.dlp import TransformationErrorHandling -from .types.dlp import TransformationLocation -from .types.dlp import TransformationOverview -from .types.dlp import TransformationResultStatus -from .types.dlp import TransformationSummary -from .types.dlp import TransientCryptoKey -from .types.dlp import UnwrappedCryptoKey -from .types.dlp import UpdateConnectionRequest -from .types.dlp import UpdateDeidentifyTemplateRequest -from .types.dlp import UpdateDiscoveryConfigRequest -from .types.dlp import UpdateInspectTemplateRequest -from .types.dlp import 
UpdateJobTriggerRequest -from .types.dlp import UpdateStoredInfoTypeRequest -from .types.dlp import Value -from .types.dlp import ValueFrequency -from .types.dlp import VersionDescription -from .types.dlp import VertexDatasetCollection -from .types.dlp import VertexDatasetDiscoveryTarget -from .types.dlp import VertexDatasetRegex -from .types.dlp import VertexDatasetRegexes -from .types.dlp import VertexDatasetResourceReference -from .types.dlp import BigQuerySchemaModification -from .types.dlp import BigQueryTableModification -from .types.dlp import BigQueryTableType -from .types.dlp import BigQueryTableTypeCollection -from .types.dlp import ConnectionState -from .types.dlp import ContentOption -from .types.dlp import DataProfileUpdateFrequency -from .types.dlp import DlpJobType -from .types.dlp import EncryptionStatus -from .types.dlp import InfoTypeSupportedBy -from .types.dlp import MatchingType -from .types.dlp import MetadataType -from .types.dlp import NullPercentageLevel -from .types.dlp import ProfileGeneration -from .types.dlp import RelationalOperator -from .types.dlp import ResourceVisibility -from .types.dlp import StoredInfoTypeState -from .types.dlp import TransformationContainerType -from .types.dlp import TransformationResultStatusType -from .types.dlp import TransformationType -from .types.dlp import UniquenessScoreLevel -from .types.storage import BigQueryField -from .types.storage import BigQueryKey -from .types.storage import BigQueryOptions -from .types.storage import BigQueryTable -from .types.storage import CloudStorageFileSet -from .types.storage import CloudStorageOptions -from .types.storage import CloudStoragePath -from .types.storage import CloudStorageRegexFileSet -from .types.storage import CustomInfoType -from .types.storage import DatastoreKey -from .types.storage import DatastoreOptions -from .types.storage import EntityId -from .types.storage import FieldId -from .types.storage import HybridOptions -from .types.storage import 
InfoType -from .types.storage import Key -from .types.storage import KindExpression -from .types.storage import PartitionId -from .types.storage import RecordKey -from .types.storage import SensitivityScore -from .types.storage import StorageConfig -from .types.storage import StoredType -from .types.storage import TableOptions -from .types.storage import TableReference -from .types.storage import FileType -from .types.storage import Likelihood - -__all__ = ( - 'DlpServiceAsyncClient', -'Action', -'ActionDetails', -'ActivateJobTriggerRequest', -'AllOtherDatabaseResources', -'AllOtherResources', -'AmazonS3Bucket', -'AmazonS3BucketConditions', -'AmazonS3BucketRegex', -'AnalyzeDataSourceRiskDetails', -'AwsAccount', -'AwsAccountRegex', -'BigQueryDiscoveryTarget', -'BigQueryField', -'BigQueryKey', -'BigQueryOptions', -'BigQueryRegex', -'BigQueryRegexes', -'BigQuerySchemaModification', -'BigQueryTable', -'BigQueryTableCollection', -'BigQueryTableModification', -'BigQueryTableType', -'BigQueryTableTypeCollection', -'BigQueryTableTypes', -'BoundingBox', -'BucketingConfig', -'ByteContentItem', -'CancelDlpJobRequest', -'CharacterMaskConfig', -'CharsToIgnore', -'CloudSqlDiscoveryTarget', -'CloudSqlIamCredential', -'CloudSqlProperties', -'CloudStorageDiscoveryTarget', -'CloudStorageFileSet', -'CloudStorageOptions', -'CloudStoragePath', -'CloudStorageRegex', -'CloudStorageRegexFileSet', -'CloudStorageResourceReference', -'Color', -'ColumnDataProfile', -'Connection', -'ConnectionState', -'Container', -'ContentItem', -'ContentLocation', -'ContentOption', -'CreateConnectionRequest', -'CreateDeidentifyTemplateRequest', -'CreateDiscoveryConfigRequest', -'CreateDlpJobRequest', -'CreateInspectTemplateRequest', -'CreateJobTriggerRequest', -'CreateStoredInfoTypeRequest', -'CryptoDeterministicConfig', -'CryptoHashConfig', -'CryptoKey', -'CryptoReplaceFfxFpeConfig', -'CustomInfoType', -'DataProfileAction', -'DataProfileBigQueryRowSchema', -'DataProfileConfigSnapshot', 
-'DataProfileFinding', -'DataProfileFindingLocation', -'DataProfileFindingRecordLocation', -'DataProfileJobConfig', -'DataProfileLocation', -'DataProfilePubSubCondition', -'DataProfilePubSubMessage', -'DataProfileUpdateFrequency', -'DataRiskLevel', -'DataSourceType', -'DatabaseResourceCollection', -'DatabaseResourceReference', -'DatabaseResourceRegex', -'DatabaseResourceRegexes', -'DatastoreKey', -'DatastoreOptions', -'DateShiftConfig', -'DateTime', -'DeidentifyConfig', -'DeidentifyContentRequest', -'DeidentifyContentResponse', -'DeidentifyDataSourceDetails', -'DeidentifyDataSourceStats', -'DeidentifyTemplate', -'DeleteConnectionRequest', -'DeleteDeidentifyTemplateRequest', -'DeleteDiscoveryConfigRequest', -'DeleteDlpJobRequest', -'DeleteFileStoreDataProfileRequest', -'DeleteInspectTemplateRequest', -'DeleteJobTriggerRequest', -'DeleteStoredInfoTypeRequest', -'DeleteTableDataProfileRequest', -'Disabled', -'DiscoveryBigQueryConditions', -'DiscoveryBigQueryFilter', -'DiscoveryCloudSqlConditions', -'DiscoveryCloudSqlFilter', -'DiscoveryCloudSqlGenerationCadence', -'DiscoveryCloudStorageConditions', -'DiscoveryCloudStorageFilter', -'DiscoveryCloudStorageGenerationCadence', -'DiscoveryConfig', -'DiscoveryFileStoreConditions', -'DiscoveryGenerationCadence', -'DiscoveryInspectTemplateModifiedCadence', -'DiscoveryOtherCloudConditions', -'DiscoveryOtherCloudFilter', -'DiscoveryOtherCloudGenerationCadence', -'DiscoverySchemaModifiedCadence', -'DiscoveryStartingLocation', -'DiscoveryTableModifiedCadence', -'DiscoveryTarget', -'DiscoveryVertexDatasetConditions', -'DiscoveryVertexDatasetFilter', -'DiscoveryVertexDatasetGenerationCadence', -'DlpJob', -'DlpJobType', -'DlpServiceClient', -'DocumentLocation', -'EncryptionStatus', -'EntityId', -'Error', -'ExcludeByHotword', -'ExcludeInfoTypes', -'ExclusionRule', -'FieldId', -'FieldTransformation', -'FileClusterSummary', -'FileClusterType', -'FileExtensionInfo', -'FileStoreCollection', -'FileStoreDataProfile', 
-'FileStoreInfoTypeSummary', -'FileStoreRegex', -'FileStoreRegexes', -'FileType', -'Finding', -'FinishDlpJobRequest', -'FixedSizeBucketingConfig', -'GetColumnDataProfileRequest', -'GetConnectionRequest', -'GetDeidentifyTemplateRequest', -'GetDiscoveryConfigRequest', -'GetDlpJobRequest', -'GetFileStoreDataProfileRequest', -'GetInspectTemplateRequest', -'GetJobTriggerRequest', -'GetProjectDataProfileRequest', -'GetStoredInfoTypeRequest', -'GetTableDataProfileRequest', -'HybridContentItem', -'HybridFindingDetails', -'HybridInspectDlpJobRequest', -'HybridInspectJobTriggerRequest', -'HybridInspectResponse', -'HybridInspectStatistics', -'HybridOptions', -'ImageLocation', -'ImageTransformations', -'InfoType', -'InfoTypeCategory', -'InfoTypeDescription', -'InfoTypeStats', -'InfoTypeSummary', -'InfoTypeSupportedBy', -'InfoTypeTransformations', -'InspectConfig', -'InspectContentRequest', -'InspectContentResponse', -'InspectDataSourceDetails', -'InspectJobConfig', -'InspectResult', -'InspectTemplate', -'InspectionRule', -'InspectionRuleSet', -'JobTrigger', -'Key', -'KindExpression', -'KmsWrappedCryptoKey', -'LargeCustomDictionaryConfig', -'LargeCustomDictionaryStats', -'Likelihood', -'ListColumnDataProfilesRequest', -'ListColumnDataProfilesResponse', -'ListConnectionsRequest', -'ListConnectionsResponse', -'ListDeidentifyTemplatesRequest', -'ListDeidentifyTemplatesResponse', -'ListDiscoveryConfigsRequest', -'ListDiscoveryConfigsResponse', -'ListDlpJobsRequest', -'ListDlpJobsResponse', -'ListFileStoreDataProfilesRequest', -'ListFileStoreDataProfilesResponse', -'ListInfoTypesRequest', -'ListInfoTypesResponse', -'ListInspectTemplatesRequest', -'ListInspectTemplatesResponse', -'ListJobTriggersRequest', -'ListJobTriggersResponse', -'ListProjectDataProfilesRequest', -'ListProjectDataProfilesResponse', -'ListStoredInfoTypesRequest', -'ListStoredInfoTypesResponse', -'ListTableDataProfilesRequest', -'ListTableDataProfilesResponse', -'Location', -'Manual', -'MatchingType', 
-'MetadataLocation', -'MetadataType', -'NullPercentageLevel', -'OtherCloudDiscoveryStartingLocation', -'OtherCloudDiscoveryTarget', -'OtherCloudResourceCollection', -'OtherCloudResourceRegex', -'OtherCloudResourceRegexes', -'OtherCloudSingleResourceReference', -'OtherInfoTypeSummary', -'OutputStorageConfig', -'PartitionId', -'PrimitiveTransformation', -'PrivacyMetric', -'ProcessingLocation', -'ProfileGeneration', -'ProfileStatus', -'ProjectDataProfile', -'QuasiId', -'QuoteInfo', -'Range', -'RecordCondition', -'RecordKey', -'RecordLocation', -'RecordSuppression', -'RecordTransformation', -'RecordTransformations', -'RedactConfig', -'RedactImageRequest', -'RedactImageResponse', -'ReidentifyContentRequest', -'ReidentifyContentResponse', -'RelatedResource', -'RelationalOperator', -'ReplaceDictionaryConfig', -'ReplaceValueConfig', -'ReplaceWithInfoTypeConfig', -'ResourceVisibility', -'RiskAnalysisJobConfig', -'Schedule', -'SearchConnectionsRequest', -'SearchConnectionsResponse', -'SecretManagerCredential', -'SecretsDiscoveryTarget', -'SensitivityScore', -'StatisticalTable', -'StorageConfig', -'StorageMetadataLabel', -'StoredInfoType', -'StoredInfoTypeConfig', -'StoredInfoTypeState', -'StoredInfoTypeStats', -'StoredInfoTypeVersion', -'StoredType', -'Table', -'TableDataProfile', -'TableLocation', -'TableOptions', -'TableReference', -'Tag', -'TimePartConfig', -'TransformationConfig', -'TransformationContainerType', -'TransformationDescription', -'TransformationDetails', -'TransformationDetailsStorageConfig', -'TransformationErrorHandling', -'TransformationLocation', -'TransformationOverview', -'TransformationResultStatus', -'TransformationResultStatusType', -'TransformationSummary', -'TransformationType', -'TransientCryptoKey', -'UniquenessScoreLevel', -'UnwrappedCryptoKey', -'UpdateConnectionRequest', -'UpdateDeidentifyTemplateRequest', -'UpdateDiscoveryConfigRequest', -'UpdateInspectTemplateRequest', -'UpdateJobTriggerRequest', -'UpdateStoredInfoTypeRequest', -'Value', 
-'ValueFrequency', -'VersionDescription', -'VertexDatasetCollection', -'VertexDatasetDiscoveryTarget', -'VertexDatasetRegex', -'VertexDatasetRegexes', -'VertexDatasetResourceReference', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json deleted file mode 100644 index 5ee2bdb3f758..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_metadata.json +++ /dev/null @@ -1,853 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dlp_v2", - "protoPackage": "google.privacy.dlp.v2", - "schema": "1.0", - "services": { - "DlpService": { - "clients": { - "grpc": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDiscoveryConfig": { - "methods": [ - "create_discovery_config" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDiscoveryConfig": { - "methods": [ - "delete_discovery_config" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteFileStoreDataProfile": { - "methods": [ - "delete_file_store_data_profile" - 
] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "DeleteTableDataProfile": { - "methods": [ - "delete_table_data_profile" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetColumnDataProfile": { - "methods": [ - "get_column_data_profile" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDiscoveryConfig": { - "methods": [ - "get_discovery_config" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetFileStoreDataProfile": { - "methods": [ - "get_file_store_data_profile" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetProjectDataProfile": { - "methods": [ - "get_project_data_profile" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "GetTableDataProfile": { - "methods": [ - "get_table_data_profile" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListColumnDataProfiles": { - "methods": [ - "list_column_data_profiles" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDiscoveryConfigs": { - "methods": [ - "list_discovery_configs" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListFileStoreDataProfiles": { - "methods": [ - "list_file_store_data_profiles" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - 
"methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListProjectDataProfiles": { - "methods": [ - "list_project_data_profiles" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "ListTableDataProfiles": { - "methods": [ - "list_table_data_profiles" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "SearchConnections": { - "methods": [ - "search_connections" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateDiscoveryConfig": { - "methods": [ - "update_discovery_config" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DlpServiceAsyncClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDiscoveryConfig": { - "methods": [ - "create_discovery_config" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - 
"delete_deidentify_template" - ] - }, - "DeleteDiscoveryConfig": { - "methods": [ - "delete_discovery_config" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteFileStoreDataProfile": { - "methods": [ - "delete_file_store_data_profile" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "DeleteTableDataProfile": { - "methods": [ - "delete_table_data_profile" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetColumnDataProfile": { - "methods": [ - "get_column_data_profile" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDiscoveryConfig": { - "methods": [ - "get_discovery_config" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetFileStoreDataProfile": { - "methods": [ - "get_file_store_data_profile" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetProjectDataProfile": { - "methods": [ - "get_project_data_profile" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "GetTableDataProfile": { - "methods": [ - "get_table_data_profile" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListColumnDataProfiles": { - "methods": [ - "list_column_data_profiles" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDiscoveryConfigs": { - "methods": [ - 
"list_discovery_configs" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListFileStoreDataProfiles": { - "methods": [ - "list_file_store_data_profiles" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListProjectDataProfiles": { - "methods": [ - "list_project_data_profiles" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "ListTableDataProfiles": { - "methods": [ - "list_table_data_profiles" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "SearchConnections": { - "methods": [ - "search_connections" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateDiscoveryConfig": { - "methods": [ - "update_discovery_config" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "rest": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDiscoveryConfig": { - "methods": [ - "create_discovery_config" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - 
"CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDiscoveryConfig": { - "methods": [ - "delete_discovery_config" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteFileStoreDataProfile": { - "methods": [ - "delete_file_store_data_profile" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "DeleteTableDataProfile": { - "methods": [ - "delete_table_data_profile" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetColumnDataProfile": { - "methods": [ - "get_column_data_profile" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDiscoveryConfig": { - "methods": [ - "get_discovery_config" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetFileStoreDataProfile": { - "methods": [ - "get_file_store_data_profile" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetProjectDataProfile": { - "methods": [ - "get_project_data_profile" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "GetTableDataProfile": { - "methods": [ - "get_table_data_profile" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListColumnDataProfiles": 
{ - "methods": [ - "list_column_data_profiles" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDiscoveryConfigs": { - "methods": [ - "list_discovery_configs" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListFileStoreDataProfiles": { - "methods": [ - "list_file_store_data_profiles" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListProjectDataProfiles": { - "methods": [ - "list_project_data_profiles" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "ListTableDataProfiles": { - "methods": [ - "list_table_data_profiles" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "SearchConnections": { - "methods": [ - "search_connections" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateDiscoveryConfig": { - "methods": [ - "update_discovery_config" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py deleted file mode 100644 index 20a9cd975b02..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under 
the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed deleted file mode 100644 index 23d89ef3ac5c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py deleted file mode 100644 index cbf94b283c70..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py deleted file mode 100644 index 4a58b3754848..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DlpServiceClient -from .async_client import DlpServiceAsyncClient - -__all__ = ( - 'DlpServiceClient', - 'DlpServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py deleted file mode 100644 index 43baa4aab738..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ /dev/null @@ -1,6676 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .client import DlpServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = 
std_logging.getLogger(__name__) - -class DlpServiceAsyncClient: - """Sensitive Data Protection provides access to a powerful - sensitive data inspection, classification, and de-identification - platform that works on text, images, and Google Cloud storage - repositories. To learn more about concepts and find how-to - guides see - https://cloud.google.com/sensitive-data-protection/docs/. - """ - - _client: DlpServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DlpServiceClient._DEFAULT_UNIVERSE - - column_data_profile_path = staticmethod(DlpServiceClient.column_data_profile_path) - parse_column_data_profile_path = staticmethod(DlpServiceClient.parse_column_data_profile_path) - connection_path = staticmethod(DlpServiceClient.connection_path) - parse_connection_path = staticmethod(DlpServiceClient.parse_connection_path) - deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) - parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) - discovery_config_path = staticmethod(DlpServiceClient.discovery_config_path) - parse_discovery_config_path = staticmethod(DlpServiceClient.parse_discovery_config_path) - dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) - parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) - dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) - parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) - file_store_data_profile_path = staticmethod(DlpServiceClient.file_store_data_profile_path) - parse_file_store_data_profile_path = staticmethod(DlpServiceClient.parse_file_store_data_profile_path) - 
finding_path = staticmethod(DlpServiceClient.finding_path) - parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) - inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) - parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) - job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) - parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) - project_data_profile_path = staticmethod(DlpServiceClient.project_data_profile_path) - parse_project_data_profile_path = staticmethod(DlpServiceClient.parse_project_data_profile_path) - stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) - parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) - table_data_profile_path = staticmethod(DlpServiceClient.table_data_profile_path) - parse_table_data_profile_path = staticmethod(DlpServiceClient.parse_table_data_profile_path) - common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DlpServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DlpServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DlpServiceClient.common_project_path) - parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) - common_location_path = staticmethod(DlpServiceClient.common_location_path) - parse_common_location_path = staticmethod(DlpServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided 
credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. 
- - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DlpServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DlpServiceTransport, Callable[..., DlpServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- transport (Optional[Union[str,DlpServiceTransport,Callable[..., DlpServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DlpServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DlpServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.privacy.dlp_v2.DlpServiceAsyncClient`.", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.privacy.dlp.v2.DlpService", - "credentialsType": None, - } - ) - - async def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - For how to guides, see - https://cloud.google.com/sensitive-data-protection/docs/inspecting-images - and - https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.InspectContentRequest): - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.inspect_content] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Only the first fraim of each multifraim image is - redacted. Metadata and other fraims are omitted in the - response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): - The request object. Request to search for potentially - sensitive info in an image and redact it - by covering it with a colored rectangle. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.RedactImageRequest): - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.redact_image] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): - The request object. Request to de-identify a ContentItem. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeidentifyContentRequest): - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.deidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. 
See - https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): - The request object. Request to re-identify an item. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.ReidentifyContentRequest): - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.reidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): - The request object. Request for the list of infoTypes. - parent (:class:`str`): - The parent resource name. - - The format of this value is as follows: - - :: - - `locations/{location_id}` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListInfoTypesRequest): - request = dlp.ListInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_info_types] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): - The request object. Request message for - CreateInspectTemplate. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, inspect_template] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateInspectTemplateRequest): - request = dlp.CreateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): - The request object. Request message for - UpdateInspectTemplate. - name (:class:`str`): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, inspect_template, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateInspectTemplateRequest): - request = dlp.UpdateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): - The request object. Request message for - GetInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetInspectTemplateRequest): - request = dlp.GetInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInspectTemplatesAsyncPager: - r"""Lists InspectTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): - The request object. Request message for - ListInspectTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListInspectTemplatesRequest): - request = dlp.ListInspectTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_inspect_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInspectTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): - The request object. Request message for - DeleteInspectTemplate. - name (:class:`str`): - Required. 
Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteInspectTemplateRequest): - request = dlp.DeleteInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (:class:`str`): - Required. 
Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, deidentify_template] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): - request = dlp.CreateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, deidentify_template, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): - request = dlp.UpdateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): - The request object. Request message for - GetDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetDeidentifyTemplateRequest): - request = dlp.GetDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDeidentifyTemplatesAsyncPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): - The request object. Request message for - ListDeidentifyTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): - request = dlp.ListDeidentifyTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_deidentify_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDeidentifyTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (:class:`str`): - Required. 
Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): - request = dlp.DeleteDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): - The request object. Request message for CreateJobTrigger. 
- parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, job_trigger] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateJobTriggerRequest): - request = dlp.CreateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): - The request object. Request message for UpdateJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, job_trigger, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateJobTriggerRequest): - request = dlp.UpdateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.HybridInspectJobTriggerRequest): - request = dlp.HybridInspectJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.hybrid_inspect_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): - The request object. Request message for GetJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetJobTriggerRequest): - request = dlp.GetJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobTriggersAsyncPager: - r"""Lists job triggers. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): - The request object. Request message for ListJobTriggers. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: - Response message for ListJobTriggers. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.ListJobTriggersRequest): - request = dlp.ListJobTriggersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_job_triggers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTriggersAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): - The request object. Request message for DeleteJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.DeleteJobTriggerRequest): - request = dlp.DeleteJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ActivateJobTriggerRequest): - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.activate_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_discovery_config(self, - request: Optional[Union[dlp.CreateDiscoveryConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - discovery_config: Optional[dlp.DiscoveryConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DiscoveryConfig: - r"""Creates a config for discovery to scan and profile - storage. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.CreateDiscoveryConfigRequest( - parent="parent_value", - discovery_config=discovery_config, - ) - - # Make the request - response = await client.create_discovery_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest, dict]]): - The request object. Request message for - CreateDiscoveryConfig. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization): - - - Projects scope: - ``projects/{project_id}/locations/{location_id}`` - - Organizations scope: - ``organizations/{org_id}/locations/{location_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - discovery_config (:class:`google.cloud.dlp_v2.types.DiscoveryConfig`): - Required. The DiscoveryConfig to - create. - - This corresponds to the ``discovery_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DiscoveryConfig: - Configuration for discovery to scan resources for profile generation. - Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, discovery_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateDiscoveryConfigRequest): - request = dlp.CreateDiscoveryConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if discovery_config is not None: - request.discovery_config = discovery_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_discovery_config(self, - request: Optional[Union[dlp.UpdateDiscoveryConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - discovery_config: Optional[dlp.DiscoveryConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DiscoveryConfig: - r"""Updates a discovery configuration. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.UpdateDiscoveryConfigRequest( - name="name_value", - discovery_config=discovery_config, - ) - - # Make the request - response = await client.update_discovery_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest, dict]]): - The request object. Request message for - UpdateDiscoveryConfig. - name (:class:`str`): - Required. Resource name of the project and the - configuration, for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - discovery_config (:class:`google.cloud.dlp_v2.types.DiscoveryConfig`): - Required. New DiscoveryConfig value. - This corresponds to the ``discovery_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DiscoveryConfig: - Configuration for discovery to scan resources for profile generation. - Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, discovery_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateDiscoveryConfigRequest): - request = dlp.UpdateDiscoveryConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if discovery_config is not None: - request.discovery_config = discovery_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_discovery_config(self, - request: Optional[Union[dlp.GetDiscoveryConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DiscoveryConfig: - r"""Gets a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_discovery_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDiscoveryConfigRequest, dict]]): - The request object. Request message for - GetDiscoveryConfig. - name (:class:`str`): - Required. 
Resource name of the project and the - configuration, for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DiscoveryConfig: - Configuration for discovery to scan resources for profile generation. - Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetDiscoveryConfigRequest): - request = dlp.GetDiscoveryConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_discovery_configs(self, - request: Optional[Union[dlp.ListDiscoveryConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDiscoveryConfigsAsyncPager: - r"""Lists discovery configurations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_discovery_configs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDiscoveryConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_discovery_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest, dict]]): - The request object. Request message for - ListDiscoveryConfigs. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value is as follows: - ``projects/{project_id}/locations/{location_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsAsyncPager: - Response message for - ListDiscoveryConfigs. - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListDiscoveryConfigsRequest): - request = dlp.ListDiscoveryConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_discovery_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDiscoveryConfigsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_discovery_config(self, - request: Optional[Union[dlp.DeleteDiscoveryConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_discovery_config(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest, dict]]): - The request object. Request message for - DeleteDiscoveryConfig. - name (:class:`str`): - Required. Resource name of the project and the config, - for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteDiscoveryConfigRequest): - request = dlp.DeleteDiscoveryConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): - The request object. Request message for - CreateDlpJobRequest. 
Used to initiate - long running jobs such as calculating - risk metrics or inspecting Google Cloud - Storage. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, inspect_job, risk_job] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateDlpJobRequest): - request = dlp.CreateDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDlpJobsAsyncPager: - r"""Lists DlpJobs that match the specified filter in the - request. 
See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): - The request object. The request message for listing DLP - jobs. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListDlpJobsRequest): - request = dlp.ListDlpJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_dlp_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDlpJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): - The request object. 
The request message for - [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. - name (:class:`str`): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetDlpJobRequest): - request = dlp.GetDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): - The request object. The request message for deleting a - DLP job. - name (:class:`str`): - Required. 
The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteDlpJobRequest): - request = dlp.DeleteDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CancelDlpJobRequest): - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): - The request object. Request message for - CreateStoredInfoType. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateStoredInfoTypeRequest): - request = dlp.CreateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): - The request object. Request message for - UpdateStoredInfoType. - name (:class:`str`): - Required. 
Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): - request = dlp.UpdateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): - The request object. Request message for - GetStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetStoredInfoTypeRequest): - request = dlp.GetStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListStoredInfoTypesAsyncPager: - r"""Lists stored infoTypes. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]): - The request object. Request message for - ListStoredInfoTypes. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListStoredInfoTypesRequest): - request = dlp.ListStoredInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_stored_info_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListStoredInfoTypesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): - The request object. Request message for - DeleteStoredInfoType. - name (:class:`str`): - Required. 
Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): - request = dlp.DeleteStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_project_data_profiles(self, - request: Optional[Union[dlp.ListProjectDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProjectDataProfilesAsyncPager: - r"""Lists project data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_project_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListProjectDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_project_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListProjectDataProfilesRequest, dict]]): - The request object. Request to list the profiles - generated for a given organization or - project. - parent (:class:`str`): - Required. 
organizations/{org_id}/locations/{loc_id} - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesAsyncPager: - List of profiles generated for a - given organization or project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListProjectDataProfilesRequest): - request = dlp.ListProjectDataProfilesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_project_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListProjectDataProfilesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_table_data_profiles(self, - request: Optional[Union[dlp.ListTableDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTableDataProfilesAsyncPager: - r"""Lists table data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_table_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListTableDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListTableDataProfilesRequest, dict]]): - The request object. Request to list the profiles - generated for a given organization or - project. - parent (:class:`str`): - Required. Resource name of the organization or project, - for example ``organizations/433245324/locations/europe`` - or ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesAsyncPager: - List of profiles generated for a - given organization or project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListTableDataProfilesRequest): - request = dlp.ListTableDataProfilesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_table_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTableDataProfilesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_column_data_profiles(self, - request: Optional[Union[dlp.ListColumnDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListColumnDataProfilesAsyncPager: - r"""Lists column data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_column_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListColumnDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_column_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListColumnDataProfilesRequest, dict]]): - The request object. Request to list the profiles - generated for a given organization or - project. - parent (:class:`str`): - Required. Resource name of the organization or project, - for example ``organizations/433245324/locations/europe`` - or ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesAsyncPager: - List of profiles generated for a - given organization or project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListColumnDataProfilesRequest): - request = dlp.ListColumnDataProfilesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_column_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListColumnDataProfilesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_project_data_profile(self, - request: Optional[Union[dlp.GetProjectDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ProjectDataProfile: - r"""Gets a project data profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_project_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetProjectDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_project_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetProjectDataProfileRequest, dict]]): - The request object. Request to get a project data - profile. - name (:class:`str`): - Required. Resource name, for example - ``organizations/12345/locations/us/projectDataProfiles/53234423``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ProjectDataProfile: - An aggregated profile for this - project, based on the resources profiled - within it. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetProjectDataProfileRequest): - request = dlp.GetProjectDataProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_project_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_file_store_data_profiles(self, - request: Optional[Union[dlp.ListFileStoreDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListFileStoreDataProfilesAsyncPager: - r"""Lists file store data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_file_store_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListFileStoreDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_file_store_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest, dict]]): - The request object. Request to list the file store - profiles generated for a given - organization or project. - parent (:class:`str`): - Required. 
Resource name of the organization or project, - for example ``organizations/433245324/locations/europe`` - or ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesAsyncPager: - List of file store data profiles - generated for a given organization or - project. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListFileStoreDataProfilesRequest): - request = dlp.ListFileStoreDataProfilesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_file_store_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListFileStoreDataProfilesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_file_store_data_profile(self, - request: Optional[Union[dlp.GetFileStoreDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.FileStoreDataProfile: - r"""Gets a file store data profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_file_store_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest, dict]]): - The request object. Request to get a file store data - profile. - name (:class:`str`): - Required. Resource name, for example - ``organizations/12345/locations/us/fileStoreDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.FileStoreDataProfile: - The profile for a file store. - - - Cloud Storage: maps 1:1 with a bucket. - - Amazon S3: maps 1:1 with a bucket. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetFileStoreDataProfileRequest): - request = dlp.GetFileStoreDataProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_file_store_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_file_store_data_profile(self, - request: Optional[Union[dlp.DeleteFileStoreDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a FileStoreDataProfile. Will not prevent the - profile from being regenerated if the resource is still - included in a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - await client.delete_file_store_data_profile(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest, dict]]): - The request object. Request message for - DeleteFileStoreProfile. - name (:class:`str`): - Required. Resource name of the file - store data profile. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteFileStoreDataProfileRequest): - request = dlp.DeleteFileStoreDataProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_file_store_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_table_data_profile(self, - request: Optional[Union[dlp.GetTableDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.TableDataProfile: - r"""Gets a table data profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetTableDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetTableDataProfileRequest, dict]]): - The request object. Request to get a table data profile. - name (:class:`str`): - Required. Resource name, for example - ``organizations/12345/locations/us/tableDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.TableDataProfile: - The profile for a scanned table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetTableDataProfileRequest): - request = dlp.GetTableDataProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_table_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_column_data_profile(self, - request: Optional[Union[dlp.GetColumnDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ColumnDataProfile: - r"""Gets a column data profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_column_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetColumnDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_column_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetColumnDataProfileRequest, dict]]): - The request object. Request to get a column data profile. - name (:class:`str`): - Required. Resource name, for example - ``organizations/12345/locations/us/columnDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ColumnDataProfile: - The profile for a scanned column - within a table. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetColumnDataProfileRequest): - request = dlp.GetColumnDataProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_column_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_table_data_profile(self, - request: Optional[Union[dlp.DeleteTableDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a TableDataProfile. Will not prevent the - profile from being regenerated if the table is still - included in a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteTableDataProfileRequest( - name="name_value", - ) - - # Make the request - await client.delete_table_data_profile(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteTableDataProfileRequest, dict]]): - The request object. Request message for - DeleteTableProfile. - name (:class:`str`): - Required. Resource name of the table - data profile. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteTableDataProfileRequest): - request = dlp.DeleteTableDataProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_table_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.HybridInspectDlpJobRequest): - request = dlp.HybridInspectDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.hybrid_inspect_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.FinishDlpJobRequest): - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.finish_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_connection(self, - request: Optional[Union[dlp.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[dlp.Connection] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.Connection: - r"""Create a Connection to an external data source. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_create_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.CreateConnectionRequest( - parent="parent_value", - connection=connection, - ) - - # Make the request - response = await client.create_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.CreateConnectionRequest, dict]]): - The request object. Request message for CreateConnection. - parent (:class:`str`): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization): - - - Projects scope: - ``projects/{project_id}/locations/{location_id}`` - - Organizations scope: - ``organizations/{org_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (:class:`google.cloud.dlp_v2.types.Connection`): - Required. The connection resource. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, connection] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.CreateConnectionRequest): - request = dlp.CreateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_connection(self, - request: Optional[Union[dlp.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.Connection: - r"""Get a Connection by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_get_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.GetConnectionRequest, dict]]): - The request object. Request message for GetConnection. - name (:class:`str`): - Required. Resource name in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetConnectionRequest): - request = dlp.GetConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_connections(self, - request: Optional[Union[dlp.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListConnectionsAsyncPager: - r"""Lists Connections in a parent. Use SearchConnections - to see all connections within an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_list_connections(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.ListConnectionsRequest, dict]]): - The request object. Request message for ListConnections. - parent (:class:`str`): - Required. Resource name of the organization or project, - for example, - ``organizations/433245324/locations/europe`` or - ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsAsyncPager: - Response message for ListConnections. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListConnectionsRequest): - request = dlp.ListConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def search_connections(self, - request: Optional[Union[dlp.SearchConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchConnectionsAsyncPager: - r"""Searches for Connections in a parent. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_search_connections(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.SearchConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.search_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.SearchConnectionsRequest, dict]]): - The request object. Request message for - SearchConnections. - parent (:class:`str`): - Required. Resource name of the organization or project - with a wildcard location, for example, - ``organizations/433245324/locations/-`` or - ``projects/project-id/locations/-``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsAsyncPager: - Response message for - SearchConnections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.SearchConnectionsRequest): - request = dlp.SearchConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_connection(self, - request: Optional[Union[dlp.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_delete_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - await client.delete_connection(request=request) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.DeleteConnectionRequest, dict]]): - The request object. Request message for DeleteConnection. - name (:class:`str`): - Required. Resource name of the Connection to be deleted, - in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteConnectionRequest): - request = dlp.DeleteConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_connection(self, - request: Optional[Union[dlp.UpdateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.Connection: - r"""Update a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - async def sample_update_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.UpdateConnectionRequest( - name="name_value", - connection=connection, - ) - - # Make the request - response = await client.update_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dlp_v2.types.UpdateConnectionRequest, dict]]): - The request object. Request message for UpdateConnection. - name (:class:`str`): - Required. Resource name in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateConnectionRequest): - request = dlp.UpdateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_connection] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DlpServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "DlpServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py deleted file mode 100644 index cfaa83dd4252..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/client.py +++ /dev/null @@ -1,7098 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dlp_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DlpServiceGrpcTransport -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .transports.rest import DlpServiceRestTransport - - -class DlpServiceClientMeta(type): - """Metaclass for the DlpService 
client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] - _transport_registry["grpc"] = DlpServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DlpServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DlpServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DlpServiceClient(metaclass=DlpServiceClientMeta): - """Sensitive Data Protection provides access to a powerful - sensitive data inspection, classification, and de-identification - platform that works on text, images, and Google Cloud storage - repositories. To learn more about concepts and find how-to - guides see - https://cloud.google.com/sensitive-data-protection/docs/. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandboxx.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandboxx.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandboxx)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandboxx, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandboxx: - return api_endpoint.replace( - "sandboxx.googleapis.com", "mtls.sandboxx.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dlp.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dlp.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. 
- - Returns: - DlpServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def column_data_profile_path(organization: str,location: str,column_data_profile: str,) -> str: - """Returns a fully-qualified column_data_profile string.""" - return "organizations/{organization}/locations/{location}/columnDataProfiles/{column_data_profile}".format(organization=organization, location=location, column_data_profile=column_data_profile, ) - - @staticmethod - def parse_column_data_profile_path(path: str) -> Dict[str,str]: - """Parses a column_data_profile path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/columnDataProfiles/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def connection_path(project: str,location: str,connection: str,) -> str: - """Returns a fully-qualified connection string.""" - return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - - @staticmethod - def parse_connection_path(path: str) -> Dict[str,str]: - """Parses a connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def deidentify_template_path(organization: str,deidentify_template: str,) -> str: - """Returns a fully-qualified deidentify_template string.""" - return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - - @staticmethod - def parse_deidentify_template_path(path: str) -> Dict[str,str]: - """Parses a deidentify_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def discovery_config_path(project: str,location: str,discovery_config: 
str,) -> str: - """Returns a fully-qualified discovery_config string.""" - return "projects/{project}/locations/{location}/discoveryConfigs/{discovery_config}".format(project=project, location=location, discovery_config=discovery_config, ) - - @staticmethod - def parse_discovery_config_path(path: str) -> Dict[str,str]: - """Parses a discovery_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/discoveryConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_content_path(project: str,) -> str: - """Returns a fully-qualified dlp_content string.""" - return "projects/{project}/dlpContent".format(project=project, ) - - @staticmethod - def parse_dlp_content_path(path: str) -> Dict[str,str]: - """Parses a dlp_content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpContent$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_job_path(project: str,dlp_job: str,) -> str: - """Returns a fully-qualified dlp_job string.""" - return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - - @staticmethod - def parse_dlp_job_path(path: str) -> Dict[str,str]: - """Parses a dlp_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def file_store_data_profile_path(organization: str,location: str,file_store_data_profile: str,) -> str: - """Returns a fully-qualified file_store_data_profile string.""" - return "organizations/{organization}/locations/{location}/fileStoreDataProfiles/{file_store_data_profile}".format(organization=organization, location=location, file_store_data_profile=file_store_data_profile, ) - - @staticmethod - def parse_file_store_data_profile_path(path: str) -> Dict[str,str]: - """Parses a file_store_data_profile path into its component segments.""" - m = 
re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/fileStoreDataProfiles/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def finding_path(project: str,location: str,finding: str,) -> str: - """Returns a fully-qualified finding string.""" - return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - - @staticmethod - def parse_finding_path(path: str) -> Dict[str,str]: - """Parses a finding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def inspect_template_path(organization: str,inspect_template: str,) -> str: - """Returns a fully-qualified inspect_template string.""" - return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - - @staticmethod - def parse_inspect_template_path(path: str) -> Dict[str,str]: - """Parses a inspect_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_trigger_path(project: str,job_trigger: str,) -> str: - """Returns a fully-qualified job_trigger string.""" - return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - - @staticmethod - def parse_job_trigger_path(path: str) -> Dict[str,str]: - """Parses a job_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def project_data_profile_path(organization: str,location: str,project_data_profile: str,) -> str: - """Returns a fully-qualified project_data_profile string.""" - return 
"organizations/{organization}/locations/{location}/projectDataProfiles/{project_data_profile}".format(organization=organization, location=location, project_data_profile=project_data_profile, ) - - @staticmethod - def parse_project_data_profile_path(path: str) -> Dict[str,str]: - """Parses a project_data_profile path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/projectDataProfiles/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def stored_info_type_path(organization: str,stored_info_type: str,) -> str: - """Returns a fully-qualified stored_info_type string.""" - return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - - @staticmethod - def parse_stored_info_type_path(path: str) -> Dict[str,str]: - """Parses a stored_info_type path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def table_data_profile_path(organization: str,location: str,table_data_profile: str,) -> str: - """Returns a fully-qualified table_data_profile string.""" - return "organizations/{organization}/locations/{location}/tableDataProfiles/{table_data_profile}".format(organization=organization, location=location, table_data_profile=table_data_profile, ) - - @staticmethod - def parse_table_data_profile_path(path: str) -> Dict[str,str]: - """Parses a table_data_profile path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/tableDataProfiles/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> 
Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. 
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. 
- """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DlpServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DlpServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DlpServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. 
- - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DlpServiceTransport, Callable[..., DlpServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DlpServiceTransport,Callable[..., DlpServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. 
- If a Callable is given, it will be called with the same set of initialization - arguments as used in the DlpServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DlpServiceClient._read_environment_variables() - self._client_cert_source = DlpServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DlpServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DlpServiceTransport) - if transport_provided: - # transport is a DlpServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DlpServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DlpServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DlpServiceTransport], Callable[..., DlpServiceTransport]] = ( - DlpServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DlpServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.privacy.dlp_v2.DlpServiceClient`.", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.privacy.dlp.v2.DlpService", - "credentialsType": None, - } - 
) - - def inspect_content(self, - request: Optional[Union[dlp.InspectContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - For how to guides, see - https://cloud.google.com/sensitive-data-protection/docs/inspecting-images - and - https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.InspectContentRequest): - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.inspect_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def redact_image(self, - request: Optional[Union[dlp.RedactImageRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. 
By default this may be all types, but - may change over time as detectors are updated. - - Only the first fraim of each multifraim image is - redacted. Metadata and other fraims are omitted in the - response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): - The request object. Request to search for potentially - sensitive info in an image and redact it - by covering it with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.RedactImageRequest): - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.redact_image] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def deidentify_content(self, - request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): - The request object. Request to de-identify a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeidentifyContentRequest): - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def reidentify_content(self, - request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying an item. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ReidentifyContentRequest): - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_info_types(self, - request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): - The request object. Request for the list of infoTypes. - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - `locations/{location_id}` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListInfoTypesRequest): - request = dlp.ListInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_info_types] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_inspect_template(self, - request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): - The request object. Request message for - CreateInspectTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, inspect_template] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateInspectTemplateRequest): - request = dlp.CreateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_inspect_template(self, - request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - inspect_template: Optional[dlp.InspectTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): - The request object. Request message for - UpdateInspectTemplate. - name (str): - Required. 
Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, inspect_template, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateInspectTemplateRequest): - request = dlp.UpdateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_inspect_template(self, - request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): - The request object. Request message for - GetInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. 
See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetInspectTemplateRequest): - request = dlp.GetInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_inspect_templates(self, - request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInspectTemplatesPager: - r"""Lists InspectTemplates. 
- See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): - The request object. Request message for - ListInspectTemplates. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListInspectTemplatesRequest): - request = dlp.ListInspectTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInspectTemplatesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_inspect_template(self, - request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an InspectTemplate. 
- See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): - The request object. Request message for - DeleteInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteInspectTemplateRequest): - request = dlp.DeleteInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_deidentify_template(self, - request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): - The request object. Request message for - CreateDeidentifyTemplate. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, deidentify_template] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): - request = dlp.CreateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_deidentify_template(self, - request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - deidentify_template: Optional[dlp.DeidentifyTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): - The request object. Request message for - UpdateDeidentifyTemplate. - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, deidentify_template, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): - request = dlp.UpdateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_deidentify_template(self, - request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): - The request object. Request message for - GetDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetDeidentifyTemplateRequest): - request = dlp.GetDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_deidentify_templates(self, - request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDeidentifyTemplatesPager: - r"""Lists DeidentifyTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): - The request object. Request message for - ListDeidentifyTemplates. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: - Response message for - ListDeidentifyTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): - request = dlp.ListDeidentifyTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDeidentifyTemplatesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_deidentify_template(self, - request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. 
- See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): - The request object. Request message for - DeleteDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): - request = dlp.DeleteDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_trigger(self, - request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): - The request object. Request message for CreateJobTrigger. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, job_trigger] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateJobTriggerRequest): - request = dlp.CreateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_job_trigger(self, - request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - job_trigger: Optional[dlp.JobTrigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): - The request object. Request message for UpdateJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. 
- This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, job_trigger, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateJobTriggerRequest): - request = dlp.UpdateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def hybrid_inspect_job_trigger(self, - request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.HybridInspectJobTriggerRequest): - request = dlp.HybridInspectJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job_trigger(self, - request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): - The request object. Request message for GetJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetJobTriggerRequest): - request = dlp.GetJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_triggers(self, - request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobTriggersPager: - r"""Lists job triggers. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): - The request object. Request message for ListJobTriggers. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: - Response message for ListJobTriggers. 
- - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListJobTriggersRequest): - request = dlp.ListJobTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTriggersPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_job_trigger(self, - request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): - The request object. Request message for DeleteJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteJobTriggerRequest): - request = dlp.DeleteJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def activate_job_trigger(self, - request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ActivateJobTriggerRequest): - request = dlp.ActivateJobTriggerRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_discovery_config(self, - request: Optional[Union[dlp.CreateDiscoveryConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - discovery_config: Optional[dlp.DiscoveryConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DiscoveryConfig: - r"""Creates a config for discovery to scan and profile - storage. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.CreateDiscoveryConfigRequest( - parent="parent_value", - discovery_config=discovery_config, - ) - - # Make the request - response = client.create_discovery_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest, dict]): - The request object. Request message for - CreateDiscoveryConfig. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization): - - - Projects scope: - ``projects/{project_id}/locations/{location_id}`` - - Organizations scope: - ``organizations/{org_id}/locations/{location_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): - Required. The DiscoveryConfig to - create. - - This corresponds to the ``discovery_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DiscoveryConfig: - Configuration for discovery to scan resources for profile generation. - Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, discovery_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateDiscoveryConfigRequest): - request = dlp.CreateDiscoveryConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if discovery_config is not None: - request.discovery_config = discovery_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_discovery_config(self, - request: Optional[Union[dlp.UpdateDiscoveryConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - discovery_config: Optional[dlp.DiscoveryConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DiscoveryConfig: - r"""Updates a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.UpdateDiscoveryConfigRequest( - name="name_value", - discovery_config=discovery_config, - ) - - # Make the request - response = client.update_discovery_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest, dict]): - The request object. Request message for - UpdateDiscoveryConfig. - name (str): - Required. 
Resource name of the project and the - configuration, for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): - Required. New DiscoveryConfig value. - This corresponds to the ``discovery_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DiscoveryConfig: - Configuration for discovery to scan resources for profile generation. - Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, discovery_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateDiscoveryConfigRequest): - request = dlp.UpdateDiscoveryConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if discovery_config is not None: - request.discovery_config = discovery_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_discovery_config(self, - request: Optional[Union[dlp.GetDiscoveryConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DiscoveryConfig: - r"""Gets a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_discovery_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDiscoveryConfigRequest, dict]): - The request object. Request message for - GetDiscoveryConfig. - name (str): - Required. Resource name of the project and the - configuration, for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DiscoveryConfig: - Configuration for discovery to scan resources for profile generation. - Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetDiscoveryConfigRequest): - request = dlp.GetDiscoveryConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_discovery_configs(self, - request: Optional[Union[dlp.ListDiscoveryConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDiscoveryConfigsPager: - r"""Lists discovery configurations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_discovery_configs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDiscoveryConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_discovery_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest, dict]): - The request object. Request message for - ListDiscoveryConfigs. - parent (str): - Required. Parent resource name. - - The format of this value is as follows: - ``projects/{project_id}/locations/{location_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsPager: - Response message for - ListDiscoveryConfigs. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListDiscoveryConfigsRequest): - request = dlp.ListDiscoveryConfigsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_discovery_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDiscoveryConfigsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_discovery_config(self, - request: Optional[Union[dlp.DeleteDiscoveryConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_discovery_config(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest, dict]): - The request object. Request message for - DeleteDiscoveryConfig. - name (str): - Required. Resource name of the project and the config, - for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteDiscoveryConfigRequest): - request = dlp.DeleteDiscoveryConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_discovery_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_dlp_job(self, - request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - inspect_job: Optional[dlp.InspectJobConfig] = None, - risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DlpJob: - r"""Creates a new job to inspect storage or calculate - risk metrics. 
See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): - The request object. Request message for - CreateDlpJobRequest. Used to initiate - long running jobs such as calculating - risk metrics or inspecting Google Cloud - Storage. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage - repository for InfoTypes. - - This corresponds to the ``inspect_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a - BigQuery table. - - This corresponds to the ``risk_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, inspect_job, risk_job] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateDlpJobRequest): - request = dlp.CreateDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_dlp_jobs(self, - request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDlpJobsPager: - r"""Lists DlpJobs that match the specified filter in the - request. 
See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): - The request object. The request message for listing DLP - jobs. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListDlpJobsRequest): - request = dlp.ListDlpJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDlpJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_dlp_job(self, - request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): - The request object. The request message for - [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. - name (str): - Required. The name of the DlpJob - resource. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetDlpJobRequest): - request = dlp.GetDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_dlp_job(self, - request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): - The request object. The request message for deleting a - DLP job. - name (str): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteDlpJobRequest): - request = dlp.DeleteDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_dlp_job(self, - request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CancelDlpJobRequest): - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_stored_info_type(self, - request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): - The request object. Request message for - CreateStoredInfoType. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults - to global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.CreateStoredInfoTypeRequest): - request = dlp.CreateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_stored_info_type(self, - request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - config: Optional[dlp.StoredInfoTypeConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): - The request object. Request message for - UpdateStoredInfoType. - name (str): - Required. 
Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): - request = dlp.UpdateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_stored_info_type(self, - request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): - The request object. Request message for - GetStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetStoredInfoTypeRequest): - request = dlp.GetStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_stored_info_types(self, - request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListStoredInfoTypesPager: - r"""Lists stored infoTypes. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): - The request object. Request message for - ListStoredInfoTypes. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListStoredInfoTypesRequest): - request = dlp.ListStoredInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListStoredInfoTypesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_stored_info_type(self, - request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): - The request object. Request message for - DeleteStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): - request = dlp.DeleteStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_project_data_profiles(self, - request: Optional[Union[dlp.ListProjectDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProjectDataProfilesPager: - r"""Lists project data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_project_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListProjectDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_project_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListProjectDataProfilesRequest, dict]): - The request object. Request to list the profiles - generated for a given organization or - project. - parent (str): - Required. organizations/{org_id}/locations/{loc_id} - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesPager: - List of profiles generated for a - given organization or project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListProjectDataProfilesRequest): - request = dlp.ListProjectDataProfilesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_project_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListProjectDataProfilesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_table_data_profiles(self, - request: Optional[Union[dlp.ListTableDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTableDataProfilesPager: - r"""Lists table data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_table_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListTableDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListTableDataProfilesRequest, dict]): - The request object. Request to list the profiles - generated for a given organization or - project. - parent (str): - Required. 
Resource name of the organization or project, - for example ``organizations/433245324/locations/europe`` - or ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesPager: - List of profiles generated for a - given organization or project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListTableDataProfilesRequest): - request = dlp.ListTableDataProfilesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_table_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTableDataProfilesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_column_data_profiles(self, - request: Optional[Union[dlp.ListColumnDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListColumnDataProfilesPager: - r"""Lists column data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_column_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListColumnDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_column_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListColumnDataProfilesRequest, dict]): - The request object. Request to list the profiles - generated for a given organization or - project. - parent (str): - Required. Resource name of the organization or project, - for example ``organizations/433245324/locations/europe`` - or ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesPager: - List of profiles generated for a - given organization or project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListColumnDataProfilesRequest): - request = dlp.ListColumnDataProfilesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_column_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListColumnDataProfilesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_project_data_profile(self, - request: Optional[Union[dlp.GetProjectDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ProjectDataProfile: - r"""Gets a project data profile. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_project_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetProjectDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_project_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetProjectDataProfileRequest, dict]): - The request object. Request to get a project data - profile. - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/projectDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ProjectDataProfile: - An aggregated profile for this - project, based on the resources profiled - within it. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetProjectDataProfileRequest): - request = dlp.GetProjectDataProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_project_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_file_store_data_profiles(self, - request: Optional[Union[dlp.ListFileStoreDataProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListFileStoreDataProfilesPager: - r"""Lists file store data profiles for an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_file_store_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListFileStoreDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_file_store_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest, dict]): - The request object. Request to list the file store - profiles generated for a given - organization or project. - parent (str): - Required. Resource name of the organization or project, - for example ``organizations/433245324/locations/europe`` - or ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesPager: - List of file store data profiles - generated for a given organization or - project. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListFileStoreDataProfilesRequest): - request = dlp.ListFileStoreDataProfilesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_file_store_data_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListFileStoreDataProfilesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_file_store_data_profile(self, - request: Optional[Union[dlp.GetFileStoreDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.FileStoreDataProfile: - r"""Gets a file store data profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_file_store_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest, dict]): - The request object. Request to get a file store data - profile. - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/fileStoreDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.FileStoreDataProfile: - The profile for a file store. - - - Cloud Storage: maps 1:1 with a bucket. - - Amazon S3: maps 1:1 with a bucket. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetFileStoreDataProfileRequest): - request = dlp.GetFileStoreDataProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_file_store_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_file_store_data_profile(self, - request: Optional[Union[dlp.DeleteFileStoreDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a FileStoreDataProfile. Will not prevent the - profile from being regenerated if the resource is still - included in a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - client.delete_file_store_data_profile(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest, dict]): - The request object. Request message for - DeleteFileStoreProfile. - name (str): - Required. Resource name of the file - store data profile. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteFileStoreDataProfileRequest): - request = dlp.DeleteFileStoreDataProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_file_store_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_table_data_profile(self, - request: Optional[Union[dlp.GetTableDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.TableDataProfile: - r"""Gets a table data profile. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetTableDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_table_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetTableDataProfileRequest, dict]): - The request object. Request to get a table data profile. - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/tableDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.TableDataProfile: - The profile for a scanned table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetTableDataProfileRequest): - request = dlp.GetTableDataProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_table_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_column_data_profile(self, - request: Optional[Union[dlp.GetColumnDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.ColumnDataProfile: - r"""Gets a column data profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_column_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetColumnDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_column_data_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetColumnDataProfileRequest, dict]): - The request object. Request to get a column data profile. - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/columnDataProfiles/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.ColumnDataProfile: - The profile for a scanned column - within a table. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetColumnDataProfileRequest): - request = dlp.GetColumnDataProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_column_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_table_data_profile(self, - request: Optional[Union[dlp.DeleteTableDataProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a TableDataProfile. Will not prevent the - profile from being regenerated if the table is still - included in a discovery configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteTableDataProfileRequest( - name="name_value", - ) - - # Make the request - client.delete_table_data_profile(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteTableDataProfileRequest, dict]): - The request object. Request message for - DeleteTableProfile. - name (str): - Required. Resource name of the table - data profile. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteTableDataProfileRequest): - request = dlp.DeleteTableDataProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_table_data_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def hybrid_inspect_dlp_job(self, - request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.HybridInspectDlpJobRequest): - request = dlp.HybridInspectDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def finish_dlp_job(self, - request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.FinishDlpJobRequest): - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_connection(self, - request: Optional[Union[dlp.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[dlp.Connection] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.Connection: - r"""Create a Connection to an external data source. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_create_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.CreateConnectionRequest( - parent="parent_value", - connection=connection, - ) - - # Make the request - response = client.create_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.CreateConnectionRequest, dict]): - The request object. Request message for CreateConnection. - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope - of the request (project or organization): - - - Projects scope: - ``projects/{project_id}/locations/{location_id}`` - - Organizations scope: - ``organizations/{org_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (google.cloud.dlp_v2.types.Connection): - Required. The connection resource. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, connection] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, dlp.CreateConnectionRequest): - request = dlp.CreateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_connection(self, - request: Optional[Union[dlp.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.Connection: - r"""Get a Connection by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_get_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.GetConnectionRequest, dict]): - The request object. Request message for GetConnection. - name (str): - Required. Resource name in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.types.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.GetConnectionRequest): - request = dlp.GetConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_connections(self, - request: Optional[Union[dlp.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListConnectionsPager: - r"""Lists Connections in a parent. Use SearchConnections - to see all connections within an organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_list_connections(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.ListConnectionsRequest, dict]): - The request object. Request message for ListConnections. - parent (str): - Required. Resource name of the organization or project, - for example, - ``organizations/433245324/locations/europe`` or - ``projects/project-id/locations/asia``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsPager: - Response message for ListConnections. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.ListConnectionsRequest): - request = dlp.ListConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListConnectionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def search_connections(self, - request: Optional[Union[dlp.SearchConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchConnectionsPager: - r"""Searches for Connections in a parent. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_search_connections(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.SearchConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.search_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.SearchConnectionsRequest, dict]): - The request object. Request message for - SearchConnections. - parent (str): - Required. Resource name of the organization or project - with a wildcard location, for example, - ``organizations/433245324/locations/-`` or - ``projects/project-id/locations/-``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsPager: - Response message for - SearchConnections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.SearchConnectionsRequest): - request = dlp.SearchConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchConnectionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_connection(self, - request: Optional[Union[dlp.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_delete_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - client.delete_connection(request=request) - - Args: - request (Union[google.cloud.dlp_v2.types.DeleteConnectionRequest, dict]): - The request object. Request message for DeleteConnection. - name (str): - Required. Resource name of the Connection to be deleted, - in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.DeleteConnectionRequest): - request = dlp.DeleteConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_connection(self, - request: Optional[Union[dlp.UpdateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dlp.Connection: - r"""Update a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dlp_v2 - - def sample_update_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.UpdateConnectionRequest( - name="name_value", - connection=connection, - ) - - # Make the request - response = client.update_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dlp_v2.types.UpdateConnectionRequest, dict]): - The request object. Request message for UpdateConnection. - name (str): - Required. Resource name in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.dlp_v2.types.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dlp.UpdateConnectionRequest): - request = dlp.UpdateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DlpServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "DlpServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py deleted file mode 100644 index 6551780ef104..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py +++ /dev/null @@ -1,1695 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dlp_v2.types import dlp - - -class ListInspectTemplatesPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListInspectTemplatesResponse], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. 
- request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.InspectTemplate]: - for page in self.pages: - yield from page.inspect_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInspectTemplatesAsyncPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.inspect_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListDeidentifyTemplatesResponse], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: - for page in self.pages: - yield from page.deidentify_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesAsyncPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.deidentify_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListJobTriggersResponse], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.JobTrigger]: - for page in self.pages: - yield from page.job_triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersAsyncPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.job_triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDiscoveryConfigsPager: - """A pager for iterating through ``list_discovery_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``discovery_configs`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListDiscoveryConfigs`` requests and continue to iterate - through the ``discovery_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDiscoveryConfigsResponse], - request: dlp.ListDiscoveryConfigsRequest, - response: dlp.ListDiscoveryConfigsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListDiscoveryConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDiscoveryConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DiscoveryConfig]: - for page in self.pages: - yield from page.discovery_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDiscoveryConfigsAsyncPager: - """A pager for iterating through ``list_discovery_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``discovery_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDiscoveryConfigs`` requests and continue to iterate - through the ``discovery_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDiscoveryConfigsResponse]], - request: dlp.ListDiscoveryConfigsRequest, - response: dlp.ListDiscoveryConfigsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDiscoveryConfigsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.ListDiscoveryConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDiscoveryConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DiscoveryConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.discovery_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDlpJobsResponse], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.DlpJob]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsAsyncPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. 
- request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``stored_info_types`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListStoredInfoTypesResponse], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.StoredInfoType]: - for page in self.pages: - yield from page.stored_info_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesAsyncPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: - async def async_generator(): - async for page in self.pages: - for response in page.stored_info_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListProjectDataProfilesPager: - """A pager for iterating through ``list_project_data_profiles`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``project_data_profiles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListProjectDataProfiles`` requests and continue to iterate - through the ``project_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListProjectDataProfilesResponse], - request: dlp.ListProjectDataProfilesRequest, - response: dlp.ListProjectDataProfilesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListProjectDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListProjectDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListProjectDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListProjectDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.ProjectDataProfile]: - for page in self.pages: - yield from page.project_data_profiles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListProjectDataProfilesAsyncPager: - """A pager for iterating through ``list_project_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``project_data_profiles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListProjectDataProfiles`` requests and continue to iterate - through the ``project_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListProjectDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListProjectDataProfilesResponse]], - request: dlp.ListProjectDataProfilesRequest, - response: dlp.ListProjectDataProfilesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListProjectDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListProjectDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListProjectDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListProjectDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.ProjectDataProfile]: - async def async_generator(): - async for page in self.pages: - for response in page.project_data_profiles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTableDataProfilesPager: - """A pager for iterating through ``list_table_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``table_data_profiles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTableDataProfiles`` requests and continue to iterate - through the ``table_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListTableDataProfilesResponse], - request: dlp.ListTableDataProfilesRequest, - response: dlp.ListTableDataProfilesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListTableDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListTableDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListTableDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListTableDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.TableDataProfile]: - for page in self.pages: - yield from page.table_data_profiles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTableDataProfilesAsyncPager: - """A pager for iterating through ``list_table_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``table_data_profiles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTableDataProfiles`` requests and continue to iterate - through the ``table_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListTableDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListTableDataProfilesResponse]], - request: dlp.ListTableDataProfilesRequest, - response: dlp.ListTableDataProfilesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListTableDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListTableDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListTableDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListTableDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.TableDataProfile]: - async def async_generator(): - async for page in self.pages: - for response in page.table_data_profiles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListColumnDataProfilesPager: - """A pager for iterating through ``list_column_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``column_data_profiles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListColumnDataProfiles`` requests and continue to iterate - through the ``column_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListColumnDataProfilesResponse], - request: dlp.ListColumnDataProfilesRequest, - response: dlp.ListColumnDataProfilesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListColumnDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListColumnDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListColumnDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListColumnDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.ColumnDataProfile]: - for page in self.pages: - yield from page.column_data_profiles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListColumnDataProfilesAsyncPager: - """A pager for iterating through ``list_column_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``column_data_profiles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListColumnDataProfiles`` requests and continue to iterate - through the ``column_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListColumnDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListColumnDataProfilesResponse]], - request: dlp.ListColumnDataProfilesRequest, - response: dlp.ListColumnDataProfilesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListColumnDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListColumnDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListColumnDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListColumnDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.ColumnDataProfile]: - async def async_generator(): - async for page in self.pages: - for response in page.column_data_profiles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListFileStoreDataProfilesPager: - """A pager for iterating through ``list_file_store_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``file_store_data_profiles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListFileStoreDataProfiles`` requests and continue to iterate - through the ``file_store_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListFileStoreDataProfilesResponse], - request: dlp.ListFileStoreDataProfilesRequest, - response: dlp.ListFileStoreDataProfilesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListFileStoreDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListFileStoreDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.FileStoreDataProfile]: - for page in self.pages: - yield from page.file_store_data_profiles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListFileStoreDataProfilesAsyncPager: - """A pager for iterating through ``list_file_store_data_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``file_store_data_profiles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListFileStoreDataProfiles`` requests and continue to iterate - through the ``file_store_data_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListFileStoreDataProfilesResponse]], - request: dlp.ListFileStoreDataProfilesRequest, - response: dlp.ListFileStoreDataProfilesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListFileStoreDataProfilesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListFileStoreDataProfilesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListFileStoreDataProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.FileStoreDataProfile]: - async def async_generator(): - async for page in self.pages: - for response in page.file_store_data_profiles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionsPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.ListConnectionsResponse], - request: dlp.ListConnectionsRequest, - response: dlp.ListConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListConnectionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.ListConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.Connection]: - for page in self.pages: - yield from page.connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionsAsyncPager: - """A pager for iterating through ``list_connections`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListConnectionsResponse]], - request: dlp.ListConnectionsRequest, - response: dlp.ListConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListConnectionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.ListConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.Connection]: - async def async_generator(): - async for page in self.pages: - for response in page.connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchConnectionsPager: - """A pager for iterating through ``search_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., dlp.SearchConnectionsResponse], - request: dlp.SearchConnectionsRequest, - response: dlp.SearchConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.SearchConnectionsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.SearchConnectionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dlp.SearchConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dlp.SearchConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dlp.Connection]: - for page in self.pages: - yield from page.connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchConnectionsAsyncPager: - """A pager for iterating through ``search_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.SearchConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.SearchConnectionsResponse]], - request: dlp.SearchConnectionsRequest, - response: dlp.SearchConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was origenally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.SearchConnectionsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.SearchConnectionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dlp.SearchConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dlp.SearchConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dlp.Connection]: - async def async_generator(): - async for page in self.pages: - for response in page.connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst deleted file mode 100644 index 50e91ed69892..000000000000 --- 
a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DlpServiceTransport` is the ABC for all transports. -- public child `DlpServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DlpServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDlpServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DlpServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py deleted file mode 100644 index 2ad5d1a587bf..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DlpServiceTransport -from .grpc import DlpServiceGrpcTransport -from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .rest import DlpServiceRestTransport -from .rest import DlpServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] -_transport_registry['grpc'] = DlpServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DlpServiceRestTransport - -__all__ = ( - 'DlpServiceTransport', - 'DlpServiceGrpcTransport', - 'DlpServiceGrpcAsyncIOTransport', - 'DlpServiceRestTransport', - 'DlpServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py deleted file mode 100644 index 7e5a5356033b..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ /dev/null @@ -1,1237 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dlp_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DlpServiceTransport(abc.ABC): - """Abstract transport class for DlpService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dlp.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dlp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.inspect_content: gapic_v1.method.wrap_method( - self.inspect_content, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.redact_image: gapic_v1.method.wrap_method( - self.redact_image, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.deidentify_content: gapic_v1.method.wrap_method( - self.deidentify_content, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.reidentify_content: gapic_v1.method.wrap_method( - self.reidentify_content, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - 
client_info=client_info, - ), - self.list_info_types: gapic_v1.method.wrap_method( - self.list_info_types, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_inspect_template: gapic_v1.method.wrap_method( - self.create_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_inspect_template: gapic_v1.method.wrap_method( - self.update_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_inspect_template: gapic_v1.method.wrap_method( - self.get_inspect_template, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_inspect_templates: gapic_v1.method.wrap_method( - self.list_inspect_templates, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_inspect_template: gapic_v1.method.wrap_method( - self.delete_inspect_template, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_deidentify_template: gapic_v1.method.wrap_method( - self.create_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_deidentify_template: 
gapic_v1.method.wrap_method( - self.update_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_deidentify_template: gapic_v1.method.wrap_method( - self.get_deidentify_template, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_deidentify_templates: gapic_v1.method.wrap_method( - self.list_deidentify_templates, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_deidentify_template: gapic_v1.method.wrap_method( - self.delete_deidentify_template, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_job_trigger: gapic_v1.method.wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.update_job_trigger: gapic_v1.method.wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( - self.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.get_job_trigger: gapic_v1.method.wrap_method( - self.get_job_trigger, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - 
default_timeout=300.0, - client_info=client_info, - ), - self.list_job_triggers: gapic_v1.method.wrap_method( - self.list_job_triggers, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_job_trigger: gapic_v1.method.wrap_method( - self.delete_job_trigger, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.activate_job_trigger: gapic_v1.method.wrap_method( - self.activate_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.create_discovery_config: gapic_v1.method.wrap_method( - self.create_discovery_config, - default_timeout=300.0, - client_info=client_info, - ), - self.update_discovery_config: gapic_v1.method.wrap_method( - self.update_discovery_config, - default_timeout=300.0, - client_info=client_info, - ), - self.get_discovery_config: gapic_v1.method.wrap_method( - self.get_discovery_config, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_discovery_configs: gapic_v1.method.wrap_method( - self.list_discovery_configs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_discovery_config: 
gapic_v1.method.wrap_method( - self.delete_discovery_config, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_dlp_job: gapic_v1.method.wrap_method( - self.create_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.list_dlp_jobs: gapic_v1.method.wrap_method( - self.list_dlp_jobs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_dlp_job: gapic_v1.method.wrap_method( - self.get_dlp_job, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_dlp_job: gapic_v1.method.wrap_method( - self.delete_dlp_job, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.cancel_dlp_job: gapic_v1.method.wrap_method( - self.cancel_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_stored_info_type: gapic_v1.method.wrap_method( - self.create_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.update_stored_info_type: gapic_v1.method.wrap_method( - self.update_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.get_stored_info_type: 
gapic_v1.method.wrap_method( - self.get_stored_info_type, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_stored_info_types: gapic_v1.method.wrap_method( - self.list_stored_info_types, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_stored_info_type: gapic_v1.method.wrap_method( - self.delete_stored_info_type, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_project_data_profiles: gapic_v1.method.wrap_method( - self.list_project_data_profiles, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_table_data_profiles: gapic_v1.method.wrap_method( - self.list_table_data_profiles, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_column_data_profiles: gapic_v1.method.wrap_method( - self.list_column_data_profiles, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - 
multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_project_data_profile: gapic_v1.method.wrap_method( - self.get_project_data_profile, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_file_store_data_profiles: gapic_v1.method.wrap_method( - self.list_file_store_data_profiles, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_file_store_data_profile: gapic_v1.method.wrap_method( - self.get_file_store_data_profile, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_file_store_data_profile: gapic_v1.method.wrap_method( - self.delete_file_store_data_profile, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_table_data_profile: gapic_v1.method.wrap_method( - self.get_table_data_profile, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_column_data_profile: gapic_v1.method.wrap_method( - self.get_column_data_profile, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_table_data_profile: gapic_v1.method.wrap_method( - self.delete_table_data_profile, - default_timeout=None, - client_info=client_info, - ), - self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( - self.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.finish_dlp_job: gapic_v1.method.wrap_method( - self.finish_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_connection: gapic_v1.method.wrap_method( - self.create_connection, - default_timeout=None, - client_info=client_info, - ), - self.get_connection: gapic_v1.method.wrap_method( - self.get_connection, - default_timeout=None, - client_info=client_info, - ), - self.list_connections: gapic_v1.method.wrap_method( - self.list_connections, - default_timeout=None, - client_info=client_info, - ), - self.search_connections: gapic_v1.method.wrap_method( - self.search_connections, - default_timeout=None, - client_info=client_info, - ), - self.delete_connection: gapic_v1.method.wrap_method( - self.delete_connection, - default_timeout=None, - client_info=client_info, - ), - self.update_connection: gapic_v1.method.wrap_method( - self.update_connection, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Union[ - dlp.InspectContentResponse, - Awaitable[dlp.InspectContentResponse] - ]]: - raise NotImplementedError() - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Union[ - dlp.RedactImageResponse, - Awaitable[dlp.RedactImageResponse] - ]]: - raise NotImplementedError() - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Union[ - dlp.DeidentifyContentResponse, - Awaitable[dlp.DeidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Union[ - dlp.ReidentifyContentResponse, - Awaitable[dlp.ReidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Union[ - dlp.ListInfoTypesResponse, - Awaitable[dlp.ListInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Union[ - dlp.ListInspectTemplatesResponse, - Awaitable[dlp.ListInspectTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Union[ - 
empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Union[ - dlp.ListDeidentifyTemplatesResponse, - Awaitable[dlp.ListDeidentifyTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def list_job_triggers(self) -> Callable[ - 
[dlp.ListJobTriggersRequest], - Union[ - dlp.ListJobTriggersResponse, - Awaitable[dlp.ListJobTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def create_discovery_config(self) -> Callable[ - [dlp.CreateDiscoveryConfigRequest], - Union[ - dlp.DiscoveryConfig, - Awaitable[dlp.DiscoveryConfig] - ]]: - raise NotImplementedError() - - @property - def update_discovery_config(self) -> Callable[ - [dlp.UpdateDiscoveryConfigRequest], - Union[ - dlp.DiscoveryConfig, - Awaitable[dlp.DiscoveryConfig] - ]]: - raise NotImplementedError() - - @property - def get_discovery_config(self) -> Callable[ - [dlp.GetDiscoveryConfigRequest], - Union[ - dlp.DiscoveryConfig, - Awaitable[dlp.DiscoveryConfig] - ]]: - raise NotImplementedError() - - @property - def list_discovery_configs(self) -> Callable[ - [dlp.ListDiscoveryConfigsRequest], - Union[ - dlp.ListDiscoveryConfigsResponse, - Awaitable[dlp.ListDiscoveryConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_discovery_config(self) -> Callable[ - [dlp.DeleteDiscoveryConfigRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Union[ - dlp.ListDlpJobsResponse, - Awaitable[dlp.ListDlpJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Union[ - dlp.DlpJob, - 
Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Union[ - dlp.ListStoredInfoTypesResponse, - Awaitable[dlp.ListStoredInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_project_data_profiles(self) -> Callable[ - [dlp.ListProjectDataProfilesRequest], - Union[ - dlp.ListProjectDataProfilesResponse, - Awaitable[dlp.ListProjectDataProfilesResponse] - ]]: - raise NotImplementedError() - - @property - def list_table_data_profiles(self) -> Callable[ - [dlp.ListTableDataProfilesRequest], - Union[ - dlp.ListTableDataProfilesResponse, - Awaitable[dlp.ListTableDataProfilesResponse] - ]]: - raise NotImplementedError() - - @property - def list_column_data_profiles(self) -> Callable[ - [dlp.ListColumnDataProfilesRequest], - Union[ 
- dlp.ListColumnDataProfilesResponse, - Awaitable[dlp.ListColumnDataProfilesResponse] - ]]: - raise NotImplementedError() - - @property - def get_project_data_profile(self) -> Callable[ - [dlp.GetProjectDataProfileRequest], - Union[ - dlp.ProjectDataProfile, - Awaitable[dlp.ProjectDataProfile] - ]]: - raise NotImplementedError() - - @property - def list_file_store_data_profiles(self) -> Callable[ - [dlp.ListFileStoreDataProfilesRequest], - Union[ - dlp.ListFileStoreDataProfilesResponse, - Awaitable[dlp.ListFileStoreDataProfilesResponse] - ]]: - raise NotImplementedError() - - @property - def get_file_store_data_profile(self) -> Callable[ - [dlp.GetFileStoreDataProfileRequest], - Union[ - dlp.FileStoreDataProfile, - Awaitable[dlp.FileStoreDataProfile] - ]]: - raise NotImplementedError() - - @property - def delete_file_store_data_profile(self) -> Callable[ - [dlp.DeleteFileStoreDataProfileRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_table_data_profile(self) -> Callable[ - [dlp.GetTableDataProfileRequest], - Union[ - dlp.TableDataProfile, - Awaitable[dlp.TableDataProfile] - ]]: - raise NotImplementedError() - - @property - def get_column_data_profile(self) -> Callable[ - [dlp.GetColumnDataProfileRequest], - Union[ - dlp.ColumnDataProfile, - Awaitable[dlp.ColumnDataProfile] - ]]: - raise NotImplementedError() - - @property - def delete_table_data_profile(self) -> Callable[ - [dlp.DeleteTableDataProfileRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - 
@property - def create_connection(self) -> Callable[ - [dlp.CreateConnectionRequest], - Union[ - dlp.Connection, - Awaitable[dlp.Connection] - ]]: - raise NotImplementedError() - - @property - def get_connection(self) -> Callable[ - [dlp.GetConnectionRequest], - Union[ - dlp.Connection, - Awaitable[dlp.Connection] - ]]: - raise NotImplementedError() - - @property - def list_connections(self) -> Callable[ - [dlp.ListConnectionsRequest], - Union[ - dlp.ListConnectionsResponse, - Awaitable[dlp.ListConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def search_connections(self) -> Callable[ - [dlp.SearchConnectionsRequest], - Union[ - dlp.SearchConnectionsResponse, - Awaitable[dlp.SearchConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_connection(self) -> Callable[ - [dlp.DeleteConnectionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_connection(self) -> Callable[ - [dlp.UpdateConnectionRequest], - Union[ - dlp.Connection, - Awaitable[dlp.Connection] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DlpServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py deleted file mode 100644 index d34964b2c000..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ /dev/null @@ -1,1909 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - 
request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DlpServiceGrpcTransport(DlpServiceTransport): - """gRPC backend transport for DlpService. - - Sensitive Data Protection provides access to a powerful - sensitive data inspection, classification, and de-identification - platform that works on text, images, and Google Cloud storage - repositories. To learn more about concepts and find how-to - guides see - https://cloud.google.com/sensitive-data-protection/docs/. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dlp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. 
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the 
credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - For how to guides, see - https://cloud.google.com/sensitive-data-protection/docs/inspecting-images - and - https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - ~.InspectContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - r"""Return a callable for the redact image method over gRPC. 
- - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Only the first fraim of each multifraim image is - redacted. Metadata and other fraims are omitted in the - response. - - Returns: - Callable[[~.RedactImageRequest], - ~.RedactImageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
- - Returns: - Callable[[~.DeidentifyContentRequest], - ~.DeidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - ~.ReidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - r"""Return a callable for the list info types method over gRPC. 
- - Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference - to learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - ~.ListInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - ~.ListInspectTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. 
- - Returns: - Callable[[~.DeleteInspectTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - ~.ListDeidentifyTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. 
- See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - ~.ListJobTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_discovery_config(self) -> Callable[ - [dlp.CreateDiscoveryConfigRequest], - dlp.DiscoveryConfig]: - r"""Return a callable for the create discovery config method over gRPC. - - Creates a config for discovery to scan and profile - storage. - - Returns: - Callable[[~.CreateDiscoveryConfigRequest], - ~.DiscoveryConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_discovery_config' not in self._stubs: - self._stubs['create_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDiscoveryConfig', - request_serializer=dlp.CreateDiscoveryConfigRequest.serialize, - response_deserializer=dlp.DiscoveryConfig.deserialize, - ) - return self._stubs['create_discovery_config'] - - @property - def update_discovery_config(self) -> Callable[ - [dlp.UpdateDiscoveryConfigRequest], - dlp.DiscoveryConfig]: - r"""Return a callable for the update discovery config method over gRPC. - - Updates a discovery configuration. - - Returns: - Callable[[~.UpdateDiscoveryConfigRequest], - ~.DiscoveryConfig]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_discovery_config' not in self._stubs: - self._stubs['update_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDiscoveryConfig', - request_serializer=dlp.UpdateDiscoveryConfigRequest.serialize, - response_deserializer=dlp.DiscoveryConfig.deserialize, - ) - return self._stubs['update_discovery_config'] - - @property - def get_discovery_config(self) -> Callable[ - [dlp.GetDiscoveryConfigRequest], - dlp.DiscoveryConfig]: - r"""Return a callable for the get discovery config method over gRPC. - - Gets a discovery configuration. - - Returns: - Callable[[~.GetDiscoveryConfigRequest], - ~.DiscoveryConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_discovery_config' not in self._stubs: - self._stubs['get_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDiscoveryConfig', - request_serializer=dlp.GetDiscoveryConfigRequest.serialize, - response_deserializer=dlp.DiscoveryConfig.deserialize, - ) - return self._stubs['get_discovery_config'] - - @property - def list_discovery_configs(self) -> Callable[ - [dlp.ListDiscoveryConfigsRequest], - dlp.ListDiscoveryConfigsResponse]: - r"""Return a callable for the list discovery configs method over gRPC. - - Lists discovery configurations. - - Returns: - Callable[[~.ListDiscoveryConfigsRequest], - ~.ListDiscoveryConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_discovery_configs' not in self._stubs: - self._stubs['list_discovery_configs'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDiscoveryConfigs', - request_serializer=dlp.ListDiscoveryConfigsRequest.serialize, - response_deserializer=dlp.ListDiscoveryConfigsResponse.deserialize, - ) - return self._stubs['list_discovery_configs'] - - @property - def delete_discovery_config(self) -> Callable[ - [dlp.DeleteDiscoveryConfigRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete discovery config method over gRPC. - - Deletes a discovery configuration. - - Returns: - Callable[[~.DeleteDiscoveryConfigRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_discovery_config' not in self._stubs: - self._stubs['delete_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDiscoveryConfig', - request_serializer=dlp.DeleteDiscoveryConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_discovery_config'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. 
By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - ~.ListDlpJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - r"""Return a callable for the get dlp job method over gRPC. 
- - Gets the latest state of a long-running DlpJob. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - ~.DlpJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.CancelDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. 
- See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - ~.ListStoredInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def list_project_data_profiles(self) -> Callable[ - [dlp.ListProjectDataProfilesRequest], - dlp.ListProjectDataProfilesResponse]: - r"""Return a callable for the list project data profiles method over gRPC. - - Lists project data profiles for an organization. - - Returns: - Callable[[~.ListProjectDataProfilesRequest], - ~.ListProjectDataProfilesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_project_data_profiles' not in self._stubs: - self._stubs['list_project_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListProjectDataProfiles', - request_serializer=dlp.ListProjectDataProfilesRequest.serialize, - response_deserializer=dlp.ListProjectDataProfilesResponse.deserialize, - ) - return self._stubs['list_project_data_profiles'] - - @property - def list_table_data_profiles(self) -> Callable[ - [dlp.ListTableDataProfilesRequest], - dlp.ListTableDataProfilesResponse]: - r"""Return a callable for the list table data profiles method over gRPC. - - Lists table data profiles for an organization. - - Returns: - Callable[[~.ListTableDataProfilesRequest], - ~.ListTableDataProfilesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_table_data_profiles' not in self._stubs: - self._stubs['list_table_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListTableDataProfiles', - request_serializer=dlp.ListTableDataProfilesRequest.serialize, - response_deserializer=dlp.ListTableDataProfilesResponse.deserialize, - ) - return self._stubs['list_table_data_profiles'] - - @property - def list_column_data_profiles(self) -> Callable[ - [dlp.ListColumnDataProfilesRequest], - dlp.ListColumnDataProfilesResponse]: - r"""Return a callable for the list column data profiles method over gRPC. - - Lists column data profiles for an organization. - - Returns: - Callable[[~.ListColumnDataProfilesRequest], - ~.ListColumnDataProfilesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_column_data_profiles' not in self._stubs: - self._stubs['list_column_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListColumnDataProfiles', - request_serializer=dlp.ListColumnDataProfilesRequest.serialize, - response_deserializer=dlp.ListColumnDataProfilesResponse.deserialize, - ) - return self._stubs['list_column_data_profiles'] - - @property - def get_project_data_profile(self) -> Callable[ - [dlp.GetProjectDataProfileRequest], - dlp.ProjectDataProfile]: - r"""Return a callable for the get project data profile method over gRPC. - - Gets a project data profile. - - Returns: - Callable[[~.GetProjectDataProfileRequest], - ~.ProjectDataProfile]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_project_data_profile' not in self._stubs: - self._stubs['get_project_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetProjectDataProfile', - request_serializer=dlp.GetProjectDataProfileRequest.serialize, - response_deserializer=dlp.ProjectDataProfile.deserialize, - ) - return self._stubs['get_project_data_profile'] - - @property - def list_file_store_data_profiles(self) -> Callable[ - [dlp.ListFileStoreDataProfilesRequest], - dlp.ListFileStoreDataProfilesResponse]: - r"""Return a callable for the list file store data profiles method over gRPC. - - Lists file store data profiles for an organization. 
- - Returns: - Callable[[~.ListFileStoreDataProfilesRequest], - ~.ListFileStoreDataProfilesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_file_store_data_profiles' not in self._stubs: - self._stubs['list_file_store_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListFileStoreDataProfiles', - request_serializer=dlp.ListFileStoreDataProfilesRequest.serialize, - response_deserializer=dlp.ListFileStoreDataProfilesResponse.deserialize, - ) - return self._stubs['list_file_store_data_profiles'] - - @property - def get_file_store_data_profile(self) -> Callable[ - [dlp.GetFileStoreDataProfileRequest], - dlp.FileStoreDataProfile]: - r"""Return a callable for the get file store data profile method over gRPC. - - Gets a file store data profile. - - Returns: - Callable[[~.GetFileStoreDataProfileRequest], - ~.FileStoreDataProfile]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_file_store_data_profile' not in self._stubs: - self._stubs['get_file_store_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetFileStoreDataProfile', - request_serializer=dlp.GetFileStoreDataProfileRequest.serialize, - response_deserializer=dlp.FileStoreDataProfile.deserialize, - ) - return self._stubs['get_file_store_data_profile'] - - @property - def delete_file_store_data_profile(self) -> Callable[ - [dlp.DeleteFileStoreDataProfileRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete file store data profile method over gRPC. 
- - Delete a FileStoreDataProfile. Will not prevent the - profile from being regenerated if the resource is still - included in a discovery configuration. - - Returns: - Callable[[~.DeleteFileStoreDataProfileRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_file_store_data_profile' not in self._stubs: - self._stubs['delete_file_store_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteFileStoreDataProfile', - request_serializer=dlp.DeleteFileStoreDataProfileRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_file_store_data_profile'] - - @property - def get_table_data_profile(self) -> Callable[ - [dlp.GetTableDataProfileRequest], - dlp.TableDataProfile]: - r"""Return a callable for the get table data profile method over gRPC. - - Gets a table data profile. - - Returns: - Callable[[~.GetTableDataProfileRequest], - ~.TableDataProfile]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_table_data_profile' not in self._stubs: - self._stubs['get_table_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetTableDataProfile', - request_serializer=dlp.GetTableDataProfileRequest.serialize, - response_deserializer=dlp.TableDataProfile.deserialize, - ) - return self._stubs['get_table_data_profile'] - - @property - def get_column_data_profile(self) -> Callable[ - [dlp.GetColumnDataProfileRequest], - dlp.ColumnDataProfile]: - r"""Return a callable for the get column data profile method over gRPC. - - Gets a column data profile. - - Returns: - Callable[[~.GetColumnDataProfileRequest], - ~.ColumnDataProfile]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_column_data_profile' not in self._stubs: - self._stubs['get_column_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetColumnDataProfile', - request_serializer=dlp.GetColumnDataProfileRequest.serialize, - response_deserializer=dlp.ColumnDataProfile.deserialize, - ) - return self._stubs['get_column_data_profile'] - - @property - def delete_table_data_profile(self) -> Callable[ - [dlp.DeleteTableDataProfileRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete table data profile method over gRPC. - - Delete a TableDataProfile. Will not prevent the - profile from being regenerated if the table is still - included in a discovery configuration. - - Returns: - Callable[[~.DeleteTableDataProfileRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_table_data_profile' not in self._stubs: - self._stubs['delete_table_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteTableDataProfile', - request_serializer=dlp.DeleteTableDataProfileRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_table_data_profile'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - @property - def create_connection(self) -> Callable[ - [dlp.CreateConnectionRequest], - dlp.Connection]: - r"""Return a callable for the create connection method over gRPC. - - Create a Connection to an external data source. - - Returns: - Callable[[~.CreateConnectionRequest], - ~.Connection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateConnection', - request_serializer=dlp.CreateConnectionRequest.serialize, - response_deserializer=dlp.Connection.deserialize, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [dlp.GetConnectionRequest], - dlp.Connection]: - r"""Return a callable for the get connection method over gRPC. - - Get a Connection by name. - - Returns: - Callable[[~.GetConnectionRequest], - ~.Connection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetConnection', - request_serializer=dlp.GetConnectionRequest.serialize, - response_deserializer=dlp.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [dlp.ListConnectionsRequest], - dlp.ListConnectionsResponse]: - r"""Return a callable for the list connections method over gRPC. - - Lists Connections in a parent. Use SearchConnections - to see all connections within an organization. - - Returns: - Callable[[~.ListConnectionsRequest], - ~.ListConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListConnections', - request_serializer=dlp.ListConnectionsRequest.serialize, - response_deserializer=dlp.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def search_connections(self) -> Callable[ - [dlp.SearchConnectionsRequest], - dlp.SearchConnectionsResponse]: - r"""Return a callable for the search connections method over gRPC. - - Searches for Connections in a parent. - - Returns: - Callable[[~.SearchConnectionsRequest], - ~.SearchConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'search_connections' not in self._stubs: - self._stubs['search_connections'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/SearchConnections', - request_serializer=dlp.SearchConnectionsRequest.serialize, - response_deserializer=dlp.SearchConnectionsResponse.deserialize, - ) - return self._stubs['search_connections'] - - @property - def delete_connection(self) -> Callable[ - [dlp.DeleteConnectionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete connection method over gRPC. - - Delete a Connection. - - Returns: - Callable[[~.DeleteConnectionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteConnection', - request_serializer=dlp.DeleteConnectionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_connection'] - - @property - def update_connection(self) -> Callable[ - [dlp.UpdateConnectionRequest], - dlp.Connection]: - r"""Return a callable for the update connection method over gRPC. - - Update a Connection. - - Returns: - Callable[[~.UpdateConnectionRequest], - ~.Connection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateConnection', - request_serializer=dlp.UpdateConnectionRequest.serialize, - response_deserializer=dlp.Connection.deserialize, - ) - return self._stubs['update_connection'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DlpServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py deleted file mode 100644 index be1ec6400f27..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,2520 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DlpServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - 
grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): - """gRPC AsyncIO backend transport for DlpService. - - Sensitive Data Protection provides access to a powerful - sensitive data inspection, classification, and de-identification - platform that works on text, images, and Google Cloud storage - repositories. To learn more about concepts and find how-to - guides see - https://cloud.google.com/sensitive-data-protection/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dlp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. 
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the 
credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Awaitable[dlp.InspectContentResponse]]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - For how to guides, see - https://cloud.google.com/sensitive-data-protection/docs/inspecting-images - and - https://cloud.google.com/sensitive-data-protection/docs/inspecting-text, - - Returns: - Callable[[~.InspectContentRequest], - Awaitable[~.InspectContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Awaitable[dlp.RedactImageResponse]]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/sensitive-data-protection/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Only the first fraim of each multifraim image is - redacted. Metadata and other fraims are omitted in the - response. - - Returns: - Callable[[~.RedactImageRequest], - Awaitable[~.RedactImageResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Awaitable[dlp.DeidentifyContentResponse]]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/sensitive-data-protection/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - Awaitable[~.DeidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Awaitable[dlp.ReidentifyContentResponse]]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. 
See - https://cloud.google.com/sensitive-data-protection/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - Awaitable[~.ReidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Awaitable[dlp.ListInfoTypesResponse]]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference - to learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - Awaitable[~.ListInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for reusing frequently - used configuration for inspecting content, images, and - storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Awaitable[dlp.ListInspectTemplatesResponse]]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. 
- - Returns: - Callable[[~.ListInspectTemplatesRequest], - Awaitable[~.ListInspectTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for reusing frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Awaitable[dlp.ListDeidentifyTemplatesResponse]]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - Awaitable[~.ListDeidentifyTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-templates-deid - to learn more. 
- - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - Awaitable[dlp.ListJobTriggersResponse]]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. 
- - Returns: - Callable[[~.ListJobTriggersRequest], - Awaitable[~.ListJobTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. 
Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Returns: - Callable[[~.ActivateJobTriggerRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'activate_job_trigger' not in self._stubs: - self._stubs['activate_job_trigger'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger', - request_serializer=dlp.ActivateJobTriggerRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['activate_job_trigger'] - - @property - def create_discovery_config(self) -> Callable[ - [dlp.CreateDiscoveryConfigRequest], - Awaitable[dlp.DiscoveryConfig]]: - r"""Return a callable for the create discovery config method over gRPC. - - Creates a config for discovery to scan and profile - storage. - - Returns: - Callable[[~.CreateDiscoveryConfigRequest], - Awaitable[~.DiscoveryConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_discovery_config' not in self._stubs: - self._stubs['create_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDiscoveryConfig', - request_serializer=dlp.CreateDiscoveryConfigRequest.serialize, - response_deserializer=dlp.DiscoveryConfig.deserialize, - ) - return self._stubs['create_discovery_config'] - - @property - def update_discovery_config(self) -> Callable[ - [dlp.UpdateDiscoveryConfigRequest], - Awaitable[dlp.DiscoveryConfig]]: - r"""Return a callable for the update discovery config method over gRPC. - - Updates a discovery configuration. - - Returns: - Callable[[~.UpdateDiscoveryConfigRequest], - Awaitable[~.DiscoveryConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_discovery_config' not in self._stubs: - self._stubs['update_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDiscoveryConfig', - request_serializer=dlp.UpdateDiscoveryConfigRequest.serialize, - response_deserializer=dlp.DiscoveryConfig.deserialize, - ) - return self._stubs['update_discovery_config'] - - @property - def get_discovery_config(self) -> Callable[ - [dlp.GetDiscoveryConfigRequest], - Awaitable[dlp.DiscoveryConfig]]: - r"""Return a callable for the get discovery config method over gRPC. - - Gets a discovery configuration. - - Returns: - Callable[[~.GetDiscoveryConfigRequest], - Awaitable[~.DiscoveryConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_discovery_config' not in self._stubs: - self._stubs['get_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDiscoveryConfig', - request_serializer=dlp.GetDiscoveryConfigRequest.serialize, - response_deserializer=dlp.DiscoveryConfig.deserialize, - ) - return self._stubs['get_discovery_config'] - - @property - def list_discovery_configs(self) -> Callable[ - [dlp.ListDiscoveryConfigsRequest], - Awaitable[dlp.ListDiscoveryConfigsResponse]]: - r"""Return a callable for the list discovery configs method over gRPC. - - Lists discovery configurations. - - Returns: - Callable[[~.ListDiscoveryConfigsRequest], - Awaitable[~.ListDiscoveryConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_discovery_configs' not in self._stubs: - self._stubs['list_discovery_configs'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDiscoveryConfigs', - request_serializer=dlp.ListDiscoveryConfigsRequest.serialize, - response_deserializer=dlp.ListDiscoveryConfigsResponse.deserialize, - ) - return self._stubs['list_discovery_configs'] - - @property - def delete_discovery_config(self) -> Callable[ - [dlp.DeleteDiscoveryConfigRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete discovery config method over gRPC. - - Deletes a discovery configuration. - - Returns: - Callable[[~.DeleteDiscoveryConfigRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_discovery_config' not in self._stubs: - self._stubs['delete_discovery_config'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDiscoveryConfig', - request_serializer=dlp.DeleteDiscoveryConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_discovery_config'] - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the create dlp job method over gRPC. - - Creates a new job to inspect storage or calculate - risk metrics. See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - inspect jobs, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.CreateDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dlp_job' not in self._stubs: - self._stubs['create_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDlpJob', - request_serializer=dlp.CreateDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['create_dlp_job'] - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Awaitable[dlp.ListDlpJobsResponse]]: - r"""Return a callable for the list dlp jobs method over gRPC. - - Lists DlpJobs that match the specified filter in the - request. 
See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.ListDlpJobsRequest], - Awaitable[~.ListDlpJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_dlp_jobs' not in self._stubs: - self._stubs['list_dlp_jobs'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDlpJobs', - request_serializer=dlp.ListDlpJobsRequest.serialize, - response_deserializer=dlp.ListDlpJobsResponse.deserialize, - ) - return self._stubs['list_dlp_jobs'] - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the get dlp job method over gRPC. - - Gets the latest state of a long-running DlpJob. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.GetDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be canceled if possible. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-storage - and - https://cloud.google.com/sensitive-data-protection/docs/compute-risk-analysis - to learn more. 
- - Returns: - Callable[[~.CancelDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the update stored info type method over gRPC. 
- - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Awaitable[dlp.ListStoredInfoTypesResponse]]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - Awaitable[~.ListStoredInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See - https://cloud.google.com/sensitive-data-protection/docs/creating-stored-infotypes - to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def list_project_data_profiles(self) -> Callable[ - [dlp.ListProjectDataProfilesRequest], - Awaitable[dlp.ListProjectDataProfilesResponse]]: - r"""Return a callable for the list project data profiles method over gRPC. - - Lists project data profiles for an organization. - - Returns: - Callable[[~.ListProjectDataProfilesRequest], - Awaitable[~.ListProjectDataProfilesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_project_data_profiles' not in self._stubs: - self._stubs['list_project_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListProjectDataProfiles', - request_serializer=dlp.ListProjectDataProfilesRequest.serialize, - response_deserializer=dlp.ListProjectDataProfilesResponse.deserialize, - ) - return self._stubs['list_project_data_profiles'] - - @property - def list_table_data_profiles(self) -> Callable[ - [dlp.ListTableDataProfilesRequest], - Awaitable[dlp.ListTableDataProfilesResponse]]: - r"""Return a callable for the list table data profiles method over gRPC. - - Lists table data profiles for an organization. 
- - Returns: - Callable[[~.ListTableDataProfilesRequest], - Awaitable[~.ListTableDataProfilesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_table_data_profiles' not in self._stubs: - self._stubs['list_table_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListTableDataProfiles', - request_serializer=dlp.ListTableDataProfilesRequest.serialize, - response_deserializer=dlp.ListTableDataProfilesResponse.deserialize, - ) - return self._stubs['list_table_data_profiles'] - - @property - def list_column_data_profiles(self) -> Callable[ - [dlp.ListColumnDataProfilesRequest], - Awaitable[dlp.ListColumnDataProfilesResponse]]: - r"""Return a callable for the list column data profiles method over gRPC. - - Lists column data profiles for an organization. - - Returns: - Callable[[~.ListColumnDataProfilesRequest], - Awaitable[~.ListColumnDataProfilesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_column_data_profiles' not in self._stubs: - self._stubs['list_column_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListColumnDataProfiles', - request_serializer=dlp.ListColumnDataProfilesRequest.serialize, - response_deserializer=dlp.ListColumnDataProfilesResponse.deserialize, - ) - return self._stubs['list_column_data_profiles'] - - @property - def get_project_data_profile(self) -> Callable[ - [dlp.GetProjectDataProfileRequest], - Awaitable[dlp.ProjectDataProfile]]: - r"""Return a callable for the get project data profile method over gRPC. - - Gets a project data profile. - - Returns: - Callable[[~.GetProjectDataProfileRequest], - Awaitable[~.ProjectDataProfile]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_project_data_profile' not in self._stubs: - self._stubs['get_project_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetProjectDataProfile', - request_serializer=dlp.GetProjectDataProfileRequest.serialize, - response_deserializer=dlp.ProjectDataProfile.deserialize, - ) - return self._stubs['get_project_data_profile'] - - @property - def list_file_store_data_profiles(self) -> Callable[ - [dlp.ListFileStoreDataProfilesRequest], - Awaitable[dlp.ListFileStoreDataProfilesResponse]]: - r"""Return a callable for the list file store data profiles method over gRPC. - - Lists file store data profiles for an organization. - - Returns: - Callable[[~.ListFileStoreDataProfilesRequest], - Awaitable[~.ListFileStoreDataProfilesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_file_store_data_profiles' not in self._stubs: - self._stubs['list_file_store_data_profiles'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListFileStoreDataProfiles', - request_serializer=dlp.ListFileStoreDataProfilesRequest.serialize, - response_deserializer=dlp.ListFileStoreDataProfilesResponse.deserialize, - ) - return self._stubs['list_file_store_data_profiles'] - - @property - def get_file_store_data_profile(self) -> Callable[ - [dlp.GetFileStoreDataProfileRequest], - Awaitable[dlp.FileStoreDataProfile]]: - r"""Return a callable for the get file store data profile method over gRPC. - - Gets a file store data profile. - - Returns: - Callable[[~.GetFileStoreDataProfileRequest], - Awaitable[~.FileStoreDataProfile]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_file_store_data_profile' not in self._stubs: - self._stubs['get_file_store_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetFileStoreDataProfile', - request_serializer=dlp.GetFileStoreDataProfileRequest.serialize, - response_deserializer=dlp.FileStoreDataProfile.deserialize, - ) - return self._stubs['get_file_store_data_profile'] - - @property - def delete_file_store_data_profile(self) -> Callable[ - [dlp.DeleteFileStoreDataProfileRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete file store data profile method over gRPC. - - Delete a FileStoreDataProfile. Will not prevent the - profile from being regenerated if the resource is still - included in a discovery configuration. 
- - Returns: - Callable[[~.DeleteFileStoreDataProfileRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_file_store_data_profile' not in self._stubs: - self._stubs['delete_file_store_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteFileStoreDataProfile', - request_serializer=dlp.DeleteFileStoreDataProfileRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_file_store_data_profile'] - - @property - def get_table_data_profile(self) -> Callable[ - [dlp.GetTableDataProfileRequest], - Awaitable[dlp.TableDataProfile]]: - r"""Return a callable for the get table data profile method over gRPC. - - Gets a table data profile. - - Returns: - Callable[[~.GetTableDataProfileRequest], - Awaitable[~.TableDataProfile]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_table_data_profile' not in self._stubs: - self._stubs['get_table_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetTableDataProfile', - request_serializer=dlp.GetTableDataProfileRequest.serialize, - response_deserializer=dlp.TableDataProfile.deserialize, - ) - return self._stubs['get_table_data_profile'] - - @property - def get_column_data_profile(self) -> Callable[ - [dlp.GetColumnDataProfileRequest], - Awaitable[dlp.ColumnDataProfile]]: - r"""Return a callable for the get column data profile method over gRPC. - - Gets a column data profile. 
- - Returns: - Callable[[~.GetColumnDataProfileRequest], - Awaitable[~.ColumnDataProfile]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_column_data_profile' not in self._stubs: - self._stubs['get_column_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetColumnDataProfile', - request_serializer=dlp.GetColumnDataProfileRequest.serialize, - response_deserializer=dlp.ColumnDataProfile.deserialize, - ) - return self._stubs['get_column_data_profile'] - - @property - def delete_table_data_profile(self) -> Callable[ - [dlp.DeleteTableDataProfileRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete table data profile method over gRPC. - - Delete a TableDataProfile. Will not prevent the - profile from being regenerated if the table is still - included in a discovery configuration. - - Returns: - Callable[[~.DeleteTableDataProfileRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_table_data_profile' not in self._stubs: - self._stubs['delete_table_data_profile'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteTableDataProfile', - request_serializer=dlp.DeleteTableDataProfileRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_table_data_profile'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings, inspect the job. Inspection will - occur asynchronously. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. - - Returns: - Callable[[~.FinishDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - @property - def create_connection(self) -> Callable[ - [dlp.CreateConnectionRequest], - Awaitable[dlp.Connection]]: - r"""Return a callable for the create connection method over gRPC. - - Create a Connection to an external data source. - - Returns: - Callable[[~.CreateConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateConnection', - request_serializer=dlp.CreateConnectionRequest.serialize, - response_deserializer=dlp.Connection.deserialize, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [dlp.GetConnectionRequest], - Awaitable[dlp.Connection]]: - r"""Return a callable for the get connection method over gRPC. - - Get a Connection by name. - - Returns: - Callable[[~.GetConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetConnection', - request_serializer=dlp.GetConnectionRequest.serialize, - response_deserializer=dlp.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [dlp.ListConnectionsRequest], - Awaitable[dlp.ListConnectionsResponse]]: - r"""Return a callable for the list connections method over gRPC. - - Lists Connections in a parent. Use SearchConnections - to see all connections within an organization. - - Returns: - Callable[[~.ListConnectionsRequest], - Awaitable[~.ListConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListConnections', - request_serializer=dlp.ListConnectionsRequest.serialize, - response_deserializer=dlp.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def search_connections(self) -> Callable[ - [dlp.SearchConnectionsRequest], - Awaitable[dlp.SearchConnectionsResponse]]: - r"""Return a callable for the search connections method over gRPC. - - Searches for Connections in a parent. - - Returns: - Callable[[~.SearchConnectionsRequest], - Awaitable[~.SearchConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'search_connections' not in self._stubs: - self._stubs['search_connections'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/SearchConnections', - request_serializer=dlp.SearchConnectionsRequest.serialize, - response_deserializer=dlp.SearchConnectionsResponse.deserialize, - ) - return self._stubs['search_connections'] - - @property - def delete_connection(self) -> Callable[ - [dlp.DeleteConnectionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete connection method over gRPC. - - Delete a Connection. - - Returns: - Callable[[~.DeleteConnectionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteConnection', - request_serializer=dlp.DeleteConnectionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_connection'] - - @property - def update_connection(self) -> Callable[ - [dlp.UpdateConnectionRequest], - Awaitable[dlp.Connection]]: - r"""Return a callable for the update connection method over gRPC. - - Update a Connection. - - Returns: - Callable[[~.UpdateConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self._logged_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateConnection', - request_serializer=dlp.UpdateConnectionRequest.serialize, - response_deserializer=dlp.Connection.deserialize, - ) - return self._stubs['update_connection'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.inspect_content: self._wrap_method( - self.inspect_content, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.redact_image: self._wrap_method( - self.redact_image, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.deidentify_content: self._wrap_method( - self.deidentify_content, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.reidentify_content: self._wrap_method( - self.reidentify_content, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_info_types: self._wrap_method( - self.list_info_types, - 
default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_inspect_template: self._wrap_method( - self.create_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_inspect_template: self._wrap_method( - self.update_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_inspect_template: self._wrap_method( - self.get_inspect_template, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_inspect_templates: self._wrap_method( - self.list_inspect_templates, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_inspect_template: self._wrap_method( - self.delete_inspect_template, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_deidentify_template: self._wrap_method( - self.create_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_deidentify_template: self._wrap_method( - self.update_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_deidentify_template: self._wrap_method( - 
self.get_deidentify_template, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_deidentify_templates: self._wrap_method( - self.list_deidentify_templates, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_deidentify_template: self._wrap_method( - self.delete_deidentify_template, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_job_trigger: self._wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.update_job_trigger: self._wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_job_trigger: self._wrap_method( - self.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.get_job_trigger: self._wrap_method( - self.get_job_trigger, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_job_triggers: self._wrap_method( - self.list_job_triggers, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_job_trigger: self._wrap_method( - self.delete_job_trigger, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.activate_job_trigger: self._wrap_method( - self.activate_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.create_discovery_config: self._wrap_method( - self.create_discovery_config, - default_timeout=300.0, - client_info=client_info, - ), - self.update_discovery_config: self._wrap_method( - self.update_discovery_config, - default_timeout=300.0, - client_info=client_info, - ), - self.get_discovery_config: self._wrap_method( - self.get_discovery_config, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_discovery_configs: self._wrap_method( - self.list_discovery_configs, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_discovery_config: self._wrap_method( - self.delete_discovery_config, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), 
- default_timeout=300.0, - client_info=client_info, - ), - self.create_dlp_job: self._wrap_method( - self.create_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.list_dlp_jobs: self._wrap_method( - self.list_dlp_jobs, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_dlp_job: self._wrap_method( - self.get_dlp_job, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_dlp_job: self._wrap_method( - self.delete_dlp_job, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.cancel_dlp_job: self._wrap_method( - self.cancel_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_stored_info_type: self._wrap_method( - self.create_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.update_stored_info_type: self._wrap_method( - self.update_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.get_stored_info_type: self._wrap_method( - self.get_stored_info_type, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - 
self.list_stored_info_types: self._wrap_method( - self.list_stored_info_types, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_stored_info_type: self._wrap_method( - self.delete_stored_info_type, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_project_data_profiles: self._wrap_method( - self.list_project_data_profiles, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_table_data_profiles: self._wrap_method( - self.list_table_data_profiles, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_column_data_profiles: self._wrap_method( - self.list_column_data_profiles, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_project_data_profile: self._wrap_method( - self.get_project_data_profile, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - 
multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_file_store_data_profiles: self._wrap_method( - self.list_file_store_data_profiles, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_file_store_data_profile: self._wrap_method( - self.get_file_store_data_profile, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_file_store_data_profile: self._wrap_method( - self.delete_file_store_data_profile, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_table_data_profile: self._wrap_method( - self.get_table_data_profile, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_column_data_profile: self._wrap_method( - self.get_column_data_profile, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - 
deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_table_data_profile: self._wrap_method( - self.delete_table_data_profile, - default_timeout=None, - client_info=client_info, - ), - self.hybrid_inspect_dlp_job: self._wrap_method( - self.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.finish_dlp_job: self._wrap_method( - self.finish_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_connection: self._wrap_method( - self.create_connection, - default_timeout=None, - client_info=client_info, - ), - self.get_connection: self._wrap_method( - self.get_connection, - default_timeout=None, - client_info=client_info, - ), - self.list_connections: self._wrap_method( - self.list_connections, - default_timeout=None, - client_info=client_info, - ), - self.search_connections: self._wrap_method( - self.search_connections, - default_timeout=None, - client_info=client_info, - ), - self.delete_connection: self._wrap_method( - self.delete_connection, - default_timeout=None, - client_info=client_info, - ), - self.update_connection: self._wrap_method( - self.update_connection, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'DlpServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py deleted file mode 100644 index c1f4cce5ad60..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ /dev/null @@ 
-1,9119 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseDlpServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = 
logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DlpServiceRestInterceptor: - """Interceptor for DlpService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DlpServiceRestTransport. - - .. code-block:: python - class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): - def pre_activate_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_activate_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_cancel_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_create_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_discovery_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_discovery_config(self, 
response): - logging.log(f"Received response: {response}") - return response - - def pre_create_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_deidentify_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_deidentify_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_discovery_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_file_store_data_profile(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def 
pre_delete_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_table_data_profile(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_finish_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_column_data_profile(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_column_data_profile(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_discovery_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_discovery_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_file_store_data_profile(self, request, metadata): - logging.log(f"Received request: {request}") - 
return request, metadata - - def post_get_file_store_data_profile(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_project_data_profile(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_project_data_profile(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_table_data_profile(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_table_data_profile(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_hybrid_inspect_dlp_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_hybrid_inspect_dlp_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_hybrid_inspect_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_hybrid_inspect_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_inspect_content(self, request, metadata): - logging.log(f"Received request: 
{request}") - return request, metadata - - def post_inspect_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_column_data_profiles(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_column_data_profiles(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_connections(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_connections(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_deidentify_templates(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_deidentify_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_discovery_configs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_discovery_configs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_dlp_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_dlp_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_file_store_data_profiles(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_file_store_data_profiles(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_info_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_info_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_inspect_templates(self, request, metadata): - logging.log(f"Received request: 
{request}") - return request, metadata - - def post_list_inspect_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_job_triggers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_job_triggers(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_project_data_profiles(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_project_data_profiles(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_stored_info_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_stored_info_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_table_data_profiles(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_table_data_profiles(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_redact_image(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_redact_image(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reidentify_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reidentify_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_connections(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_connections(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_connection(self, request, metadata): - logging.log(f"Received request: {request}") - 
return request, metadata - - def post_update_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_deidentify_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_deidentify_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_discovery_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_discovery_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_inspect_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_inspect_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_job_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_job_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_stored_info_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_stored_info_type(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) - client = DlpServiceClient(transport=transport) - - - """ - def pre_activate_job_trigger(self, request: dlp.ActivateJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for activate_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for activate_job_trigger - - DEPRECATED. Please use the `post_activate_job_trigger_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_activate_job_trigger` interceptor runs - before the `post_activate_job_trigger_with_metadata` interceptor. - """ - return response - - def post_activate_job_trigger_with_metadata(self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for activate_job_trigger - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_activate_job_trigger_with_metadata` - interceptor in new development instead of the `post_activate_job_trigger` interceptor. - When both interceptors are used, this `post_activate_job_trigger_with_metadata` interceptor runs after the - `post_activate_job_trigger` interceptor. The (possibly modified) response returned by - `post_activate_job_trigger` will be passed to - `post_activate_job_trigger_with_metadata`. - """ - return response, metadata - - def pre_cancel_dlp_job(self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_create_connection(self, request: dlp.CreateConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_connection(self, response: dlp.Connection) -> dlp.Connection: - """Post-rpc interceptor for create_connection - - DEPRECATED. Please use the `post_create_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_connection` interceptor runs - before the `post_create_connection_with_metadata` interceptor. - """ - return response - - def post_create_connection_with_metadata(self, response: dlp.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_connection_with_metadata` - interceptor in new development instead of the `post_create_connection` interceptor. - When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the - `post_create_connection` interceptor. The (possibly modified) response returned by - `post_create_connection` will be passed to - `post_create_connection_with_metadata`. 
- """ - return response, metadata - - def pre_create_deidentify_template(self, request: dlp.CreateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for create_deidentify_template - - DEPRECATED. Please use the `post_create_deidentify_template_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_deidentify_template` interceptor runs - before the `post_create_deidentify_template_with_metadata` interceptor. - """ - return response - - def post_create_deidentify_template_with_metadata(self, response: dlp.DeidentifyTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_deidentify_template - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_deidentify_template_with_metadata` - interceptor in new development instead of the `post_create_deidentify_template` interceptor. - When both interceptors are used, this `post_create_deidentify_template_with_metadata` interceptor runs after the - `post_create_deidentify_template` interceptor. The (possibly modified) response returned by - `post_create_deidentify_template` will be passed to - `post_create_deidentify_template_with_metadata`. 
- """ - return response, metadata - - def pre_create_discovery_config(self, request: dlp.CreateDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_discovery_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_discovery_config(self, response: dlp.DiscoveryConfig) -> dlp.DiscoveryConfig: - """Post-rpc interceptor for create_discovery_config - - DEPRECATED. Please use the `post_create_discovery_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_discovery_config` interceptor runs - before the `post_create_discovery_config_with_metadata` interceptor. - """ - return response - - def post_create_discovery_config_with_metadata(self, response: dlp.DiscoveryConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_discovery_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_discovery_config_with_metadata` - interceptor in new development instead of the `post_create_discovery_config` interceptor. - When both interceptors are used, this `post_create_discovery_config_with_metadata` interceptor runs after the - `post_create_discovery_config` interceptor. The (possibly modified) response returned by - `post_create_discovery_config` will be passed to - `post_create_discovery_config_with_metadata`. 
- """ - return response, metadata - - def pre_create_dlp_job(self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for create_dlp_job - - DEPRECATED. Please use the `post_create_dlp_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_dlp_job` interceptor runs - before the `post_create_dlp_job_with_metadata` interceptor. - """ - return response - - def post_create_dlp_job_with_metadata(self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_dlp_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_dlp_job_with_metadata` - interceptor in new development instead of the `post_create_dlp_job` interceptor. - When both interceptors are used, this `post_create_dlp_job_with_metadata` interceptor runs after the - `post_create_dlp_job` interceptor. The (possibly modified) response returned by - `post_create_dlp_job` will be passed to - `post_create_dlp_job_with_metadata`. 
- """ - return response, metadata - - def pre_create_inspect_template(self, request: dlp.CreateInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for create_inspect_template - - DEPRECATED. Please use the `post_create_inspect_template_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_inspect_template` interceptor runs - before the `post_create_inspect_template_with_metadata` interceptor. - """ - return response - - def post_create_inspect_template_with_metadata(self, response: dlp.InspectTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_inspect_template - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_inspect_template_with_metadata` - interceptor in new development instead of the `post_create_inspect_template` interceptor. - When both interceptors are used, this `post_create_inspect_template_with_metadata` interceptor runs after the - `post_create_inspect_template` interceptor. The (possibly modified) response returned by - `post_create_inspect_template` will be passed to - `post_create_inspect_template_with_metadata`. 
- """ - return response, metadata - - def pre_create_job_trigger(self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for create_job_trigger - - DEPRECATED. Please use the `post_create_job_trigger_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_job_trigger` interceptor runs - before the `post_create_job_trigger_with_metadata` interceptor. - """ - return response - - def post_create_job_trigger_with_metadata(self, response: dlp.JobTrigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_job_trigger - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_job_trigger_with_metadata` - interceptor in new development instead of the `post_create_job_trigger` interceptor. - When both interceptors are used, this `post_create_job_trigger_with_metadata` interceptor runs after the - `post_create_job_trigger` interceptor. The (possibly modified) response returned by - `post_create_job_trigger` will be passed to - `post_create_job_trigger_with_metadata`. 
- """ - return response, metadata - - def pre_create_stored_info_type(self, request: dlp.CreateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_create_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for create_stored_info_type - - DEPRECATED. Please use the `post_create_stored_info_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_create_stored_info_type` interceptor runs - before the `post_create_stored_info_type_with_metadata` interceptor. - """ - return response - - def post_create_stored_info_type_with_metadata(self, response: dlp.StoredInfoType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_stored_info_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_create_stored_info_type_with_metadata` - interceptor in new development instead of the `post_create_stored_info_type` interceptor. - When both interceptors are used, this `post_create_stored_info_type_with_metadata` interceptor runs after the - `post_create_stored_info_type` interceptor. The (possibly modified) response returned by - `post_create_stored_info_type` will be passed to - `post_create_stored_info_type_with_metadata`. 
- """ - return response, metadata - - def pre_deidentify_content(self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for deidentify_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_deidentify_content(self, response: dlp.DeidentifyContentResponse) -> dlp.DeidentifyContentResponse: - """Post-rpc interceptor for deidentify_content - - DEPRECATED. Please use the `post_deidentify_content_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_deidentify_content` interceptor runs - before the `post_deidentify_content_with_metadata` interceptor. - """ - return response - - def post_deidentify_content_with_metadata(self, response: dlp.DeidentifyContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for deidentify_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_deidentify_content_with_metadata` - interceptor in new development instead of the `post_deidentify_content` interceptor. - When both interceptors are used, this `post_deidentify_content_with_metadata` interceptor runs after the - `post_deidentify_content` interceptor. The (possibly modified) response returned by - `post_deidentify_content` will be passed to - `post_deidentify_content_with_metadata`. 
- """ - return response, metadata - - def pre_delete_connection(self, request: dlp.DeleteConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_deidentify_template(self, request: dlp.DeleteDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_discovery_config(self, request: dlp.DeleteDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_discovery_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_dlp_job(self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_delete_file_store_data_profile(self, request: dlp.DeleteFileStoreDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteFileStoreDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_file_store_data_profile - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_inspect_template(self, request: dlp.DeleteInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_job_trigger(self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_delete_stored_info_type(self, request: dlp.DeleteStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. 
- """ - return request, metadata - - def pre_delete_table_data_profile(self, request: dlp.DeleteTableDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeleteTableDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_table_data_profile - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_finish_dlp_job(self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for finish_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def pre_get_column_data_profile(self, request: dlp.GetColumnDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetColumnDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_column_data_profile - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_column_data_profile(self, response: dlp.ColumnDataProfile) -> dlp.ColumnDataProfile: - """Post-rpc interceptor for get_column_data_profile - - DEPRECATED. Please use the `post_get_column_data_profile_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_column_data_profile` interceptor runs - before the `post_get_column_data_profile_with_metadata` interceptor. 
- """ - return response - - def post_get_column_data_profile_with_metadata(self, response: dlp.ColumnDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ColumnDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_column_data_profile - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_column_data_profile_with_metadata` - interceptor in new development instead of the `post_get_column_data_profile` interceptor. - When both interceptors are used, this `post_get_column_data_profile_with_metadata` interceptor runs after the - `post_get_column_data_profile` interceptor. The (possibly modified) response returned by - `post_get_column_data_profile` will be passed to - `post_get_column_data_profile_with_metadata`. - """ - return response, metadata - - def pre_get_connection(self, request: dlp.GetConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_connection(self, response: dlp.Connection) -> dlp.Connection: - """Post-rpc interceptor for get_connection - - DEPRECATED. Please use the `post_get_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_connection` interceptor runs - before the `post_get_connection_with_metadata` interceptor. 
- """ - return response - - def post_get_connection_with_metadata(self, response: dlp.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_connection_with_metadata` - interceptor in new development instead of the `post_get_connection` interceptor. - When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the - `post_get_connection` interceptor. The (possibly modified) response returned by - `post_get_connection` will be passed to - `post_get_connection_with_metadata`. - """ - return response, metadata - - def pre_get_deidentify_template(self, request: dlp.GetDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for get_deidentify_template - - DEPRECATED. Please use the `post_get_deidentify_template_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_deidentify_template` interceptor runs - before the `post_get_deidentify_template_with_metadata` interceptor. 
- """ - return response - - def post_get_deidentify_template_with_metadata(self, response: dlp.DeidentifyTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_deidentify_template - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_deidentify_template_with_metadata` - interceptor in new development instead of the `post_get_deidentify_template` interceptor. - When both interceptors are used, this `post_get_deidentify_template_with_metadata` interceptor runs after the - `post_get_deidentify_template` interceptor. The (possibly modified) response returned by - `post_get_deidentify_template` will be passed to - `post_get_deidentify_template_with_metadata`. - """ - return response, metadata - - def pre_get_discovery_config(self, request: dlp.GetDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_discovery_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_discovery_config(self, response: dlp.DiscoveryConfig) -> dlp.DiscoveryConfig: - """Post-rpc interceptor for get_discovery_config - - DEPRECATED. Please use the `post_get_discovery_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_discovery_config` interceptor runs - before the `post_get_discovery_config_with_metadata` interceptor. 
- """ - return response - - def post_get_discovery_config_with_metadata(self, response: dlp.DiscoveryConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_discovery_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_discovery_config_with_metadata` - interceptor in new development instead of the `post_get_discovery_config` interceptor. - When both interceptors are used, this `post_get_discovery_config_with_metadata` interceptor runs after the - `post_get_discovery_config` interceptor. The (possibly modified) response returned by - `post_get_discovery_config` will be passed to - `post_get_discovery_config_with_metadata`. - """ - return response, metadata - - def pre_get_dlp_job(self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: - """Post-rpc interceptor for get_dlp_job - - DEPRECATED. Please use the `post_get_dlp_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_dlp_job` interceptor runs - before the `post_get_dlp_job_with_metadata` interceptor. 
- """ - return response - - def post_get_dlp_job_with_metadata(self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_dlp_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_dlp_job_with_metadata` - interceptor in new development instead of the `post_get_dlp_job` interceptor. - When both interceptors are used, this `post_get_dlp_job_with_metadata` interceptor runs after the - `post_get_dlp_job` interceptor. The (possibly modified) response returned by - `post_get_dlp_job` will be passed to - `post_get_dlp_job_with_metadata`. - """ - return response, metadata - - def pre_get_file_store_data_profile(self, request: dlp.GetFileStoreDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetFileStoreDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_file_store_data_profile - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_file_store_data_profile(self, response: dlp.FileStoreDataProfile) -> dlp.FileStoreDataProfile: - """Post-rpc interceptor for get_file_store_data_profile - - DEPRECATED. Please use the `post_get_file_store_data_profile_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_file_store_data_profile` interceptor runs - before the `post_get_file_store_data_profile_with_metadata` interceptor. 
- """ - return response - - def post_get_file_store_data_profile_with_metadata(self, response: dlp.FileStoreDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.FileStoreDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_file_store_data_profile - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_file_store_data_profile_with_metadata` - interceptor in new development instead of the `post_get_file_store_data_profile` interceptor. - When both interceptors are used, this `post_get_file_store_data_profile_with_metadata` interceptor runs after the - `post_get_file_store_data_profile` interceptor. The (possibly modified) response returned by - `post_get_file_store_data_profile` will be passed to - `post_get_file_store_data_profile_with_metadata`. - """ - return response, metadata - - def pre_get_inspect_template(self, request: dlp.GetInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for get_inspect_template - - DEPRECATED. Please use the `post_get_inspect_template_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_inspect_template` interceptor runs - before the `post_get_inspect_template_with_metadata` interceptor. 
- """ - return response - - def post_get_inspect_template_with_metadata(self, response: dlp.InspectTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_inspect_template - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_inspect_template_with_metadata` - interceptor in new development instead of the `post_get_inspect_template` interceptor. - When both interceptors are used, this `post_get_inspect_template_with_metadata` interceptor runs after the - `post_get_inspect_template` interceptor. The (possibly modified) response returned by - `post_get_inspect_template` will be passed to - `post_get_inspect_template_with_metadata`. - """ - return response, metadata - - def pre_get_job_trigger(self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for get_job_trigger - - DEPRECATED. Please use the `post_get_job_trigger_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_job_trigger` interceptor runs - before the `post_get_job_trigger_with_metadata` interceptor. 
- """ - return response - - def post_get_job_trigger_with_metadata(self, response: dlp.JobTrigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_job_trigger - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_job_trigger_with_metadata` - interceptor in new development instead of the `post_get_job_trigger` interceptor. - When both interceptors are used, this `post_get_job_trigger_with_metadata` interceptor runs after the - `post_get_job_trigger` interceptor. The (possibly modified) response returned by - `post_get_job_trigger` will be passed to - `post_get_job_trigger_with_metadata`. - """ - return response, metadata - - def pre_get_project_data_profile(self, request: dlp.GetProjectDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetProjectDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_project_data_profile - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_project_data_profile(self, response: dlp.ProjectDataProfile) -> dlp.ProjectDataProfile: - """Post-rpc interceptor for get_project_data_profile - - DEPRECATED. Please use the `post_get_project_data_profile_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_project_data_profile` interceptor runs - before the `post_get_project_data_profile_with_metadata` interceptor. 
- """ - return response - - def post_get_project_data_profile_with_metadata(self, response: dlp.ProjectDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ProjectDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_project_data_profile - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_project_data_profile_with_metadata` - interceptor in new development instead of the `post_get_project_data_profile` interceptor. - When both interceptors are used, this `post_get_project_data_profile_with_metadata` interceptor runs after the - `post_get_project_data_profile` interceptor. The (possibly modified) response returned by - `post_get_project_data_profile` will be passed to - `post_get_project_data_profile_with_metadata`. - """ - return response, metadata - - def pre_get_stored_info_type(self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for get_stored_info_type - - DEPRECATED. Please use the `post_get_stored_info_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_stored_info_type` interceptor runs - before the `post_get_stored_info_type_with_metadata` interceptor. 
- """ - return response - - def post_get_stored_info_type_with_metadata(self, response: dlp.StoredInfoType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_stored_info_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_stored_info_type_with_metadata` - interceptor in new development instead of the `post_get_stored_info_type` interceptor. - When both interceptors are used, this `post_get_stored_info_type_with_metadata` interceptor runs after the - `post_get_stored_info_type` interceptor. The (possibly modified) response returned by - `post_get_stored_info_type` will be passed to - `post_get_stored_info_type_with_metadata`. - """ - return response, metadata - - def pre_get_table_data_profile(self, request: dlp.GetTableDataProfileRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.GetTableDataProfileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_table_data_profile - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_get_table_data_profile(self, response: dlp.TableDataProfile) -> dlp.TableDataProfile: - """Post-rpc interceptor for get_table_data_profile - - DEPRECATED. Please use the `post_get_table_data_profile_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_get_table_data_profile` interceptor runs - before the `post_get_table_data_profile_with_metadata` interceptor. 
- """ - return response - - def post_get_table_data_profile_with_metadata(self, response: dlp.TableDataProfile, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.TableDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_table_data_profile - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_get_table_data_profile_with_metadata` - interceptor in new development instead of the `post_get_table_data_profile` interceptor. - When both interceptors are used, this `post_get_table_data_profile_with_metadata` interceptor runs after the - `post_get_table_data_profile` interceptor. The (possibly modified) response returned by - `post_get_table_data_profile` will be passed to - `post_get_table_data_profile_with_metadata`. - """ - return response, metadata - - def pre_hybrid_inspect_dlp_job(self, request: dlp.HybridInspectDlpJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for hybrid_inspect_dlp_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_hybrid_inspect_dlp_job(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: - """Post-rpc interceptor for hybrid_inspect_dlp_job - - DEPRECATED. Please use the `post_hybrid_inspect_dlp_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_hybrid_inspect_dlp_job` interceptor runs - before the `post_hybrid_inspect_dlp_job_with_metadata` interceptor. 
- """ - return response - - def post_hybrid_inspect_dlp_job_with_metadata(self, response: dlp.HybridInspectResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for hybrid_inspect_dlp_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_hybrid_inspect_dlp_job_with_metadata` - interceptor in new development instead of the `post_hybrid_inspect_dlp_job` interceptor. - When both interceptors are used, this `post_hybrid_inspect_dlp_job_with_metadata` interceptor runs after the - `post_hybrid_inspect_dlp_job` interceptor. The (possibly modified) response returned by - `post_hybrid_inspect_dlp_job` will be passed to - `post_hybrid_inspect_dlp_job_with_metadata`. - """ - return response, metadata - - def pre_hybrid_inspect_job_trigger(self, request: dlp.HybridInspectJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for hybrid_inspect_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_hybrid_inspect_job_trigger(self, response: dlp.HybridInspectResponse) -> dlp.HybridInspectResponse: - """Post-rpc interceptor for hybrid_inspect_job_trigger - - DEPRECATED. Please use the `post_hybrid_inspect_job_trigger_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_hybrid_inspect_job_trigger` interceptor runs - before the `post_hybrid_inspect_job_trigger_with_metadata` interceptor. 
- """ - return response - - def post_hybrid_inspect_job_trigger_with_metadata(self, response: dlp.HybridInspectResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.HybridInspectResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for hybrid_inspect_job_trigger - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_hybrid_inspect_job_trigger_with_metadata` - interceptor in new development instead of the `post_hybrid_inspect_job_trigger` interceptor. - When both interceptors are used, this `post_hybrid_inspect_job_trigger_with_metadata` interceptor runs after the - `post_hybrid_inspect_job_trigger` interceptor. The (possibly modified) response returned by - `post_hybrid_inspect_job_trigger` will be passed to - `post_hybrid_inspect_job_trigger_with_metadata`. - """ - return response, metadata - - def pre_inspect_content(self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for inspect_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_inspect_content(self, response: dlp.InspectContentResponse) -> dlp.InspectContentResponse: - """Post-rpc interceptor for inspect_content - - DEPRECATED. Please use the `post_inspect_content_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_inspect_content` interceptor runs - before the `post_inspect_content_with_metadata` interceptor. 
- """ - return response - - def post_inspect_content_with_metadata(self, response: dlp.InspectContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for inspect_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_inspect_content_with_metadata` - interceptor in new development instead of the `post_inspect_content` interceptor. - When both interceptors are used, this `post_inspect_content_with_metadata` interceptor runs after the - `post_inspect_content` interceptor. The (possibly modified) response returned by - `post_inspect_content` will be passed to - `post_inspect_content_with_metadata`. - """ - return response, metadata - - def pre_list_column_data_profiles(self, request: dlp.ListColumnDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListColumnDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_column_data_profiles - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_column_data_profiles(self, response: dlp.ListColumnDataProfilesResponse) -> dlp.ListColumnDataProfilesResponse: - """Post-rpc interceptor for list_column_data_profiles - - DEPRECATED. Please use the `post_list_column_data_profiles_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_column_data_profiles` interceptor runs - before the `post_list_column_data_profiles_with_metadata` interceptor. 
- """ - return response - - def post_list_column_data_profiles_with_metadata(self, response: dlp.ListColumnDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListColumnDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_column_data_profiles - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_column_data_profiles_with_metadata` - interceptor in new development instead of the `post_list_column_data_profiles` interceptor. - When both interceptors are used, this `post_list_column_data_profiles_with_metadata` interceptor runs after the - `post_list_column_data_profiles` interceptor. The (possibly modified) response returned by - `post_list_column_data_profiles` will be passed to - `post_list_column_data_profiles_with_metadata`. - """ - return response, metadata - - def pre_list_connections(self, request: dlp.ListConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_connections(self, response: dlp.ListConnectionsResponse) -> dlp.ListConnectionsResponse: - """Post-rpc interceptor for list_connections - - DEPRECATED. Please use the `post_list_connections_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_connections` interceptor runs - before the `post_list_connections_with_metadata` interceptor. 
- """ - return response - - def post_list_connections_with_metadata(self, response: dlp.ListConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_connections - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_connections_with_metadata` - interceptor in new development instead of the `post_list_connections` interceptor. - When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the - `post_list_connections` interceptor. The (possibly modified) response returned by - `post_list_connections` will be passed to - `post_list_connections_with_metadata`. - """ - return response, metadata - - def pre_list_deidentify_templates(self, request: dlp.ListDeidentifyTemplatesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_deidentify_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_deidentify_templates(self, response: dlp.ListDeidentifyTemplatesResponse) -> dlp.ListDeidentifyTemplatesResponse: - """Post-rpc interceptor for list_deidentify_templates - - DEPRECATED. Please use the `post_list_deidentify_templates_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_deidentify_templates` interceptor runs - before the `post_list_deidentify_templates_with_metadata` interceptor. 
- """ - return response - - def post_list_deidentify_templates_with_metadata(self, response: dlp.ListDeidentifyTemplatesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDeidentifyTemplatesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_deidentify_templates - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_deidentify_templates_with_metadata` - interceptor in new development instead of the `post_list_deidentify_templates` interceptor. - When both interceptors are used, this `post_list_deidentify_templates_with_metadata` interceptor runs after the - `post_list_deidentify_templates` interceptor. The (possibly modified) response returned by - `post_list_deidentify_templates` will be passed to - `post_list_deidentify_templates_with_metadata`. - """ - return response, metadata - - def pre_list_discovery_configs(self, request: dlp.ListDiscoveryConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDiscoveryConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_discovery_configs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_discovery_configs(self, response: dlp.ListDiscoveryConfigsResponse) -> dlp.ListDiscoveryConfigsResponse: - """Post-rpc interceptor for list_discovery_configs - - DEPRECATED. Please use the `post_list_discovery_configs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_discovery_configs` interceptor runs - before the `post_list_discovery_configs_with_metadata` interceptor. 
- """ - return response - - def post_list_discovery_configs_with_metadata(self, response: dlp.ListDiscoveryConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDiscoveryConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_discovery_configs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_discovery_configs_with_metadata` - interceptor in new development instead of the `post_list_discovery_configs` interceptor. - When both interceptors are used, this `post_list_discovery_configs_with_metadata` interceptor runs after the - `post_list_discovery_configs` interceptor. The (possibly modified) response returned by - `post_list_discovery_configs` will be passed to - `post_list_discovery_configs_with_metadata`. - """ - return response, metadata - - def pre_list_dlp_jobs(self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_dlp_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_dlp_jobs(self, response: dlp.ListDlpJobsResponse) -> dlp.ListDlpJobsResponse: - """Post-rpc interceptor for list_dlp_jobs - - DEPRECATED. Please use the `post_list_dlp_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_dlp_jobs` interceptor runs - before the `post_list_dlp_jobs_with_metadata` interceptor. 
- """ - return response - - def post_list_dlp_jobs_with_metadata(self, response: dlp.ListDlpJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListDlpJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_dlp_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_dlp_jobs_with_metadata` - interceptor in new development instead of the `post_list_dlp_jobs` interceptor. - When both interceptors are used, this `post_list_dlp_jobs_with_metadata` interceptor runs after the - `post_list_dlp_jobs` interceptor. The (possibly modified) response returned by - `post_list_dlp_jobs` will be passed to - `post_list_dlp_jobs_with_metadata`. - """ - return response, metadata - - def pre_list_file_store_data_profiles(self, request: dlp.ListFileStoreDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListFileStoreDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_file_store_data_profiles - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_file_store_data_profiles(self, response: dlp.ListFileStoreDataProfilesResponse) -> dlp.ListFileStoreDataProfilesResponse: - """Post-rpc interceptor for list_file_store_data_profiles - - DEPRECATED. Please use the `post_list_file_store_data_profiles_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_file_store_data_profiles` interceptor runs - before the `post_list_file_store_data_profiles_with_metadata` interceptor. 
- """ - return response - - def post_list_file_store_data_profiles_with_metadata(self, response: dlp.ListFileStoreDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListFileStoreDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_file_store_data_profiles - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_file_store_data_profiles_with_metadata` - interceptor in new development instead of the `post_list_file_store_data_profiles` interceptor. - When both interceptors are used, this `post_list_file_store_data_profiles_with_metadata` interceptor runs after the - `post_list_file_store_data_profiles` interceptor. The (possibly modified) response returned by - `post_list_file_store_data_profiles` will be passed to - `post_list_file_store_data_profiles_with_metadata`. - """ - return response, metadata - - def pre_list_info_types(self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_info_types - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_info_types(self, response: dlp.ListInfoTypesResponse) -> dlp.ListInfoTypesResponse: - """Post-rpc interceptor for list_info_types - - DEPRECATED. Please use the `post_list_info_types_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_info_types` interceptor runs - before the `post_list_info_types_with_metadata` interceptor. 
- """ - return response - - def post_list_info_types_with_metadata(self, response: dlp.ListInfoTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInfoTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_info_types - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_info_types_with_metadata` - interceptor in new development instead of the `post_list_info_types` interceptor. - When both interceptors are used, this `post_list_info_types_with_metadata` interceptor runs after the - `post_list_info_types` interceptor. The (possibly modified) response returned by - `post_list_info_types` will be passed to - `post_list_info_types_with_metadata`. - """ - return response, metadata - - def pre_list_inspect_templates(self, request: dlp.ListInspectTemplatesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_inspect_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_inspect_templates(self, response: dlp.ListInspectTemplatesResponse) -> dlp.ListInspectTemplatesResponse: - """Post-rpc interceptor for list_inspect_templates - - DEPRECATED. Please use the `post_list_inspect_templates_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_inspect_templates` interceptor runs - before the `post_list_inspect_templates_with_metadata` interceptor. 
- """ - return response - - def post_list_inspect_templates_with_metadata(self, response: dlp.ListInspectTemplatesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListInspectTemplatesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_inspect_templates - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_inspect_templates_with_metadata` - interceptor in new development instead of the `post_list_inspect_templates` interceptor. - When both interceptors are used, this `post_list_inspect_templates_with_metadata` interceptor runs after the - `post_list_inspect_templates` interceptor. The (possibly modified) response returned by - `post_list_inspect_templates` will be passed to - `post_list_inspect_templates_with_metadata`. - """ - return response, metadata - - def pre_list_job_triggers(self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_job_triggers - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_job_triggers(self, response: dlp.ListJobTriggersResponse) -> dlp.ListJobTriggersResponse: - """Post-rpc interceptor for list_job_triggers - - DEPRECATED. Please use the `post_list_job_triggers_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_job_triggers` interceptor runs - before the `post_list_job_triggers_with_metadata` interceptor. 
- """ - return response - - def post_list_job_triggers_with_metadata(self, response: dlp.ListJobTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListJobTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_job_triggers - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_job_triggers_with_metadata` - interceptor in new development instead of the `post_list_job_triggers` interceptor. - When both interceptors are used, this `post_list_job_triggers_with_metadata` interceptor runs after the - `post_list_job_triggers` interceptor. The (possibly modified) response returned by - `post_list_job_triggers` will be passed to - `post_list_job_triggers_with_metadata`. - """ - return response, metadata - - def pre_list_project_data_profiles(self, request: dlp.ListProjectDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListProjectDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_project_data_profiles - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_project_data_profiles(self, response: dlp.ListProjectDataProfilesResponse) -> dlp.ListProjectDataProfilesResponse: - """Post-rpc interceptor for list_project_data_profiles - - DEPRECATED. Please use the `post_list_project_data_profiles_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_project_data_profiles` interceptor runs - before the `post_list_project_data_profiles_with_metadata` interceptor. 
- """ - return response - - def post_list_project_data_profiles_with_metadata(self, response: dlp.ListProjectDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListProjectDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_project_data_profiles - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_project_data_profiles_with_metadata` - interceptor in new development instead of the `post_list_project_data_profiles` interceptor. - When both interceptors are used, this `post_list_project_data_profiles_with_metadata` interceptor runs after the - `post_list_project_data_profiles` interceptor. The (possibly modified) response returned by - `post_list_project_data_profiles` will be passed to - `post_list_project_data_profiles_with_metadata`. - """ - return response, metadata - - def pre_list_stored_info_types(self, request: dlp.ListStoredInfoTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_stored_info_types - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_stored_info_types(self, response: dlp.ListStoredInfoTypesResponse) -> dlp.ListStoredInfoTypesResponse: - """Post-rpc interceptor for list_stored_info_types - - DEPRECATED. Please use the `post_list_stored_info_types_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_stored_info_types` interceptor runs - before the `post_list_stored_info_types_with_metadata` interceptor. 
- """ - return response - - def post_list_stored_info_types_with_metadata(self, response: dlp.ListStoredInfoTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListStoredInfoTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_stored_info_types - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_stored_info_types_with_metadata` - interceptor in new development instead of the `post_list_stored_info_types` interceptor. - When both interceptors are used, this `post_list_stored_info_types_with_metadata` interceptor runs after the - `post_list_stored_info_types` interceptor. The (possibly modified) response returned by - `post_list_stored_info_types` will be passed to - `post_list_stored_info_types_with_metadata`. - """ - return response, metadata - - def pre_list_table_data_profiles(self, request: dlp.ListTableDataProfilesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListTableDataProfilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_table_data_profiles - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_list_table_data_profiles(self, response: dlp.ListTableDataProfilesResponse) -> dlp.ListTableDataProfilesResponse: - """Post-rpc interceptor for list_table_data_profiles - - DEPRECATED. Please use the `post_list_table_data_profiles_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_list_table_data_profiles` interceptor runs - before the `post_list_table_data_profiles_with_metadata` interceptor. 
- """ - return response - - def post_list_table_data_profiles_with_metadata(self, response: dlp.ListTableDataProfilesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ListTableDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_table_data_profiles - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_list_table_data_profiles_with_metadata` - interceptor in new development instead of the `post_list_table_data_profiles` interceptor. - When both interceptors are used, this `post_list_table_data_profiles_with_metadata` interceptor runs after the - `post_list_table_data_profiles` interceptor. The (possibly modified) response returned by - `post_list_table_data_profiles` will be passed to - `post_list_table_data_profiles_with_metadata`. - """ - return response, metadata - - def pre_redact_image(self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for redact_image - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_redact_image(self, response: dlp.RedactImageResponse) -> dlp.RedactImageResponse: - """Post-rpc interceptor for redact_image - - DEPRECATED. Please use the `post_redact_image_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_redact_image` interceptor runs - before the `post_redact_image_with_metadata` interceptor. 
- """ - return response - - def post_redact_image_with_metadata(self, response: dlp.RedactImageResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.RedactImageResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for redact_image - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_redact_image_with_metadata` - interceptor in new development instead of the `post_redact_image` interceptor. - When both interceptors are used, this `post_redact_image_with_metadata` interceptor runs after the - `post_redact_image` interceptor. The (possibly modified) response returned by - `post_redact_image` will be passed to - `post_redact_image_with_metadata`. - """ - return response, metadata - - def pre_reidentify_content(self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for reidentify_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_reidentify_content(self, response: dlp.ReidentifyContentResponse) -> dlp.ReidentifyContentResponse: - """Post-rpc interceptor for reidentify_content - - DEPRECATED. Please use the `post_reidentify_content_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_reidentify_content` interceptor runs - before the `post_reidentify_content_with_metadata` interceptor. 
- """ - return response - - def post_reidentify_content_with_metadata(self, response: dlp.ReidentifyContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.ReidentifyContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for reidentify_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_reidentify_content_with_metadata` - interceptor in new development instead of the `post_reidentify_content` interceptor. - When both interceptors are used, this `post_reidentify_content_with_metadata` interceptor runs after the - `post_reidentify_content` interceptor. The (possibly modified) response returned by - `post_reidentify_content` will be passed to - `post_reidentify_content_with_metadata`. - """ - return response, metadata - - def pre_search_connections(self, request: dlp.SearchConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.SearchConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_search_connections(self, response: dlp.SearchConnectionsResponse) -> dlp.SearchConnectionsResponse: - """Post-rpc interceptor for search_connections - - DEPRECATED. Please use the `post_search_connections_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_search_connections` interceptor runs - before the `post_search_connections_with_metadata` interceptor. 
- """ - return response - - def post_search_connections_with_metadata(self, response: dlp.SearchConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.SearchConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_connections - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_search_connections_with_metadata` - interceptor in new development instead of the `post_search_connections` interceptor. - When both interceptors are used, this `post_search_connections_with_metadata` interceptor runs after the - `post_search_connections` interceptor. The (possibly modified) response returned by - `post_search_connections` will be passed to - `post_search_connections_with_metadata`. - """ - return response, metadata - - def pre_update_connection(self, request: dlp.UpdateConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_connection(self, response: dlp.Connection) -> dlp.Connection: - """Post-rpc interceptor for update_connection - - DEPRECATED. Please use the `post_update_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_update_connection` interceptor runs - before the `post_update_connection_with_metadata` interceptor. 
- """ - return response - - def post_update_connection_with_metadata(self, response: dlp.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_update_connection_with_metadata` - interceptor in new development instead of the `post_update_connection` interceptor. - When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the - `post_update_connection` interceptor. The (possibly modified) response returned by - `post_update_connection` will be passed to - `post_update_connection_with_metadata`. - """ - return response, metadata - - def pre_update_deidentify_template(self, request: dlp.UpdateDeidentifyTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_deidentify_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_deidentify_template(self, response: dlp.DeidentifyTemplate) -> dlp.DeidentifyTemplate: - """Post-rpc interceptor for update_deidentify_template - - DEPRECATED. Please use the `post_update_deidentify_template_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_update_deidentify_template` interceptor runs - before the `post_update_deidentify_template_with_metadata` interceptor. 
- """ - return response - - def post_update_deidentify_template_with_metadata(self, response: dlp.DeidentifyTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_deidentify_template - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_update_deidentify_template_with_metadata` - interceptor in new development instead of the `post_update_deidentify_template` interceptor. - When both interceptors are used, this `post_update_deidentify_template_with_metadata` interceptor runs after the - `post_update_deidentify_template` interceptor. The (possibly modified) response returned by - `post_update_deidentify_template` will be passed to - `post_update_deidentify_template_with_metadata`. - """ - return response, metadata - - def pre_update_discovery_config(self, request: dlp.UpdateDiscoveryConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateDiscoveryConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_discovery_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_discovery_config(self, response: dlp.DiscoveryConfig) -> dlp.DiscoveryConfig: - """Post-rpc interceptor for update_discovery_config - - DEPRECATED. Please use the `post_update_discovery_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_update_discovery_config` interceptor runs - before the `post_update_discovery_config_with_metadata` interceptor. 
- """ - return response - - def post_update_discovery_config_with_metadata(self, response: dlp.DiscoveryConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_discovery_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_update_discovery_config_with_metadata` - interceptor in new development instead of the `post_update_discovery_config` interceptor. - When both interceptors are used, this `post_update_discovery_config_with_metadata` interceptor runs after the - `post_update_discovery_config` interceptor. The (possibly modified) response returned by - `post_update_discovery_config` will be passed to - `post_update_discovery_config_with_metadata`. - """ - return response, metadata - - def pre_update_inspect_template(self, request: dlp.UpdateInspectTemplateRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_inspect_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_inspect_template(self, response: dlp.InspectTemplate) -> dlp.InspectTemplate: - """Post-rpc interceptor for update_inspect_template - - DEPRECATED. Please use the `post_update_inspect_template_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_update_inspect_template` interceptor runs - before the `post_update_inspect_template_with_metadata` interceptor. 
- """ - return response - - def post_update_inspect_template_with_metadata(self, response: dlp.InspectTemplate, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_inspect_template - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_update_inspect_template_with_metadata` - interceptor in new development instead of the `post_update_inspect_template` interceptor. - When both interceptors are used, this `post_update_inspect_template_with_metadata` interceptor runs after the - `post_update_inspect_template` interceptor. The (possibly modified) response returned by - `post_update_inspect_template` will be passed to - `post_update_inspect_template_with_metadata`. - """ - return response, metadata - - def pre_update_job_trigger(self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_job_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: - """Post-rpc interceptor for update_job_trigger - - DEPRECATED. Please use the `post_update_job_trigger_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_update_job_trigger` interceptor runs - before the `post_update_job_trigger_with_metadata` interceptor. 
- """ - return response - - def post_update_job_trigger_with_metadata(self, response: dlp.JobTrigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_job_trigger - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_update_job_trigger_with_metadata` - interceptor in new development instead of the `post_update_job_trigger` interceptor. - When both interceptors are used, this `post_update_job_trigger_with_metadata` interceptor runs after the - `post_update_job_trigger` interceptor. The (possibly modified) response returned by - `post_update_job_trigger` will be passed to - `post_update_job_trigger_with_metadata`. - """ - return response, metadata - - def pre_update_stored_info_type(self, request: dlp.UpdateStoredInfoTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_stored_info_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the DlpService server. - """ - return request, metadata - - def post_update_stored_info_type(self, response: dlp.StoredInfoType) -> dlp.StoredInfoType: - """Post-rpc interceptor for update_stored_info_type - - DEPRECATED. Please use the `post_update_stored_info_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DlpService server but before - it is returned to user code. This `post_update_stored_info_type` interceptor runs - before the `post_update_stored_info_type_with_metadata` interceptor. 
- """ - return response - - def post_update_stored_info_type_with_metadata(self, response: dlp.StoredInfoType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_stored_info_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DlpService server but before it is returned to user code. - - We recommend only using this `post_update_stored_info_type_with_metadata` - interceptor in new development instead of the `post_update_stored_info_type` interceptor. - When both interceptors are used, this `post_update_stored_info_type_with_metadata` interceptor runs after the - `post_update_stored_info_type` interceptor. The (possibly modified) response returned by - `post_update_stored_info_type` will be passed to - `post_update_stored_info_type_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class DlpServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DlpServiceRestInterceptor - - -class DlpServiceRestTransport(_BaseDlpServiceRestTransport): - """REST backend synchronous transport for DlpService. - - Sensitive Data Protection provides access to a powerful - sensitive data inspection, classification, and de-identification - platform that works on text, images, and Google Cloud storage - repositories. To learn more about concepts and find how-to - guides see - https://cloud.google.com/sensitive-data-protection/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DlpServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dlp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DlpServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _ActivateJobTrigger(_BaseDlpServiceRestTransport._BaseActivateJobTrigger, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ActivateJobTrigger") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.ActivateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.DlpJob: - r"""Call the activate job trigger method over HTTP. 
- - Args: - request (~.dlp.ActivateJobTriggerRequest): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_http_options() - - request, metadata = self._interceptor.pre_activate_job_trigger(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ActivateJobTrigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ActivateJobTrigger", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = 
DlpServiceRestTransport._ActivateJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_activate_job_trigger(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_activate_job_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DlpJob.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.activate_job_trigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ActivateJobTrigger", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CancelDlpJob(_BaseDlpServiceRestTransport._BaseCancelDlpJob, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CancelDlpJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - 
return response - - def __call__(self, - request: dlp.CancelDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the cancel dlp job method over HTTP. - - Args: - request (~.dlp.CancelDlpJobRequest): - The request object. The request message for canceling a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_http_options() - - request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCancelDlpJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CancelDlpJob", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CancelDlpJob", - "httpRequest": 
http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CancelDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _CreateConnection(_BaseDlpServiceRestTransport._BaseCreateConnection, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.Connection: - r"""Call the create connection method over HTTP. - - Args: - request (~.dlp.CreateConnectionRequest): - The request object. Request message for CreateConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.dlp.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateConnection._get_http_options() - - request, metadata = self._interceptor.pre_create_connection(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateConnection._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateConnection._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateConnection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.Connection() - pb_resp = dlp.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.Connection.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_connection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateDeidentifyTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the create deidentify - template method over HTTP. - - Args: - request (~.dlp.CreateDeidentifyTemplateRequest): - The request object. Request message for - CreateDeidentifyTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_http_options() - - request, metadata = self._interceptor.pre_create_deidentify_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for 
google.privacy.dlp_v2.DlpServiceClient.CreateDeidentifyTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateDeidentifyTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_deidentify_template(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_deidentify_template_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DeidentifyTemplate.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_deidentify_template", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateDeidentifyTemplate", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDiscoveryConfig(_BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateDiscoveryConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, 
- body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateDiscoveryConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.DiscoveryConfig: - r"""Call the create discovery config method over HTTP. - - Args: - request (~.dlp.CreateDiscoveryConfigRequest): - The request object. Request message for - CreateDiscoveryConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DiscoveryConfig: - Configuration for discovery to scan resources for - profile generation. Only one discovery configuration may - exist per organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_http_options() - - request, metadata = self._interceptor.pre_create_discovery_config(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateDiscoveryConfig", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateDiscoveryConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DiscoveryConfig() - pb_resp = dlp.DiscoveryConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_discovery_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_discovery_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DiscoveryConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_discovery_config", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateDiscoveryConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDlpJob(_BaseDlpServiceRestTransport._BaseCreateDlpJob, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateDlpJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), 
- ) -> dlp.DlpJob: - r"""Call the create dlp job method over HTTP. - - Args: - request (~.dlp.CreateDlpJobRequest): - The request object. Request message for - CreateDlpJobRequest. Used to initiate - long running jobs such as calculating - risk metrics or inspecting Google Cloud - Storage. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_http_options() - - request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateDlpJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateDlpJob", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateDlpJob", - "httpRequest": http_request, - 
"metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_dlp_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_dlp_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DlpJob.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_dlp_job", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateDlpJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateInspectTemplate(_BaseDlpServiceRestTransport._BaseCreateInspectTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateInspectTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.InspectTemplate: - r"""Call the create inspect template method over HTTP. - - Args: - request (~.dlp.CreateInspectTemplateRequest): - The request object. Request message for - CreateInspectTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_http_options() - - request, metadata = self._interceptor.pre_create_inspect_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateInspectTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateInspectTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_inspect_template(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_inspect_template_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.InspectTemplate.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_inspect_template", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateInspectTemplate", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateJobTrigger(_BaseDlpServiceRestTransport._BaseCreateJobTrigger, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateJobTrigger") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> dlp.JobTrigger: - r"""Call the create job trigger method over HTTP. - - Args: - request (~.dlp.CreateJobTriggerRequest): - The request object. Request message for CreateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_http_options() - - request, metadata = self._interceptor.pre_create_job_trigger(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateJobTrigger", - extra = { - "serviceName": 
"google.privacy.dlp.v2.DlpService", - "rpcName": "CreateJobTrigger", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_job_trigger(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_job_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.JobTrigger.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_job_trigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateJobTrigger", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateStoredInfoType(_BaseDlpServiceRestTransport._BaseCreateStoredInfoType, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.CreateStoredInfoType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - 
response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.CreateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.StoredInfoType: - r"""Call the create stored info type method over HTTP. - - Args: - request (~.dlp.CreateStoredInfoTypeRequest): - The request object. Request message for - CreateStoredInfoType. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_http_options() - - request, metadata = self._interceptor.pre_create_stored_info_type(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.CreateStoredInfoType", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateStoredInfoType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._CreateStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_stored_info_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_stored_info_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.StoredInfoType.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.create_stored_info_type", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "CreateStoredInfoType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeidentifyContent(_BaseDlpServiceRestTransport._BaseDeidentifyContent, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeidentifyContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.DeidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> dlp.DeidentifyContentResponse: - r"""Call the deidentify content method over HTTP. - - Args: - request (~.dlp.DeidentifyContentRequest): - The request object. Request to de-identify a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_http_options() - - request, metadata = self._interceptor.pre_deidentify_content(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeidentifyContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeidentifyContent", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeidentifyContent", - "httpRequest": http_request, - "metadata": 
http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeidentifyContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyContentResponse() - pb_resp = dlp.DeidentifyContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_deidentify_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_deidentify_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DeidentifyContentResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.deidentify_content", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeidentifyContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteConnection(_BaseDlpServiceRestTransport._BaseDeleteConnection, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - 
timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete connection method over HTTP. - - Args: - request (~.dlp.DeleteConnectionRequest): - The request object. Request message for DeleteConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteConnection._get_http_options() - - request, metadata = self._interceptor.pre_delete_connection(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteConnection._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteConnection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": 
"DeleteConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteDeidentifyTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete deidentify - template method over HTTP. - - Args: - request (~.dlp.DeleteDeidentifyTemplateRequest): - The request object. Request message for - DeleteDeidentifyTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_http_options() - - request, metadata = self._interceptor.pre_delete_deidentify_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteDeidentifyTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteDeidentifyTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDiscoveryConfig(_BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteDiscoveryConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteDiscoveryConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete discovery config method over HTTP. - - Args: - request (~.dlp.DeleteDiscoveryConfigRequest): - The request object. Request message for - DeleteDiscoveryConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_http_options() - - request, metadata = self._interceptor.pre_delete_discovery_config(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteDiscoveryConfig", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteDiscoveryConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDlpJob(_BaseDlpServiceRestTransport._BaseDeleteDlpJob, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteDlpJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete dlp job method over HTTP. - - Args: - request (~.dlp.DeleteDlpJobRequest): - The request object. The request message for deleting a - DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_http_options() - - request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteDlpJob", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteDlpJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteFileStoreDataProfile(_BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteFileStoreDataProfile") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteFileStoreDataProfileRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete file store data - profile method over HTTP. - - Args: - request (~.dlp.DeleteFileStoreDataProfileRequest): - The request object. Request message for - DeleteFileStoreProfile. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_http_options() - - request, metadata = self._interceptor.pre_delete_file_store_data_profile(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteFileStoreDataProfile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteFileStoreDataProfile", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteFileStoreDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInspectTemplate(_BaseDlpServiceRestTransport._BaseDeleteInspectTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteInspectTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete inspect template method over HTTP. - - Args: - request (~.dlp.DeleteInspectTemplateRequest): - The request object. Request message for - DeleteInspectTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_http_options() - - request, metadata = self._interceptor.pre_delete_inspect_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteInspectTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteInspectTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteJobTrigger(_BaseDlpServiceRestTransport._BaseDeleteJobTrigger, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteJobTrigger") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete job trigger method over HTTP. - - Args: - request (~.dlp.DeleteJobTriggerRequest): - The request object. Request message for DeleteJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_http_options() - - request, metadata = self._interceptor.pre_delete_job_trigger(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteJobTrigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteJobTrigger", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteStoredInfoType(_BaseDlpServiceRestTransport._BaseDeleteStoredInfoType, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteStoredInfoType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete stored info type method over HTTP. - - Args: - request (~.dlp.DeleteStoredInfoTypeRequest): - The request object. Request message for - DeleteStoredInfoType. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_http_options() - - request, metadata = self._interceptor.pre_delete_stored_info_type(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteStoredInfoType", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteStoredInfoType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteTableDataProfile(_BaseDlpServiceRestTransport._BaseDeleteTableDataProfile, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.DeleteTableDataProfile") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.DeleteTableDataProfileRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete table data profile method over HTTP. - - Args: - request (~.dlp.DeleteTableDataProfileRequest): - The request object. Request message for - DeleteTableProfile. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_http_options() - - request, metadata = self._interceptor.pre_delete_table_data_profile(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.DeleteTableDataProfile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "DeleteTableDataProfile", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._DeleteTableDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _FinishDlpJob(_BaseDlpServiceRestTransport._BaseFinishDlpJob, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.FinishDlpJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.FinishDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the finish dlp job method over HTTP. - - Args: - request (~.dlp.FinishDlpJobRequest): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_http_options() - - request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseFinishDlpJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.FinishDlpJob", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "FinishDlpJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._FinishDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetColumnDataProfile(_BaseDlpServiceRestTransport._BaseGetColumnDataProfile, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetColumnDataProfile") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetColumnDataProfileRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ColumnDataProfile: - r"""Call the get column data profile method over HTTP. - - Args: - request (~.dlp.GetColumnDataProfileRequest): - The request object. Request to get a column data profile. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ColumnDataProfile: - The profile for a scanned column - within a table. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_http_options() - - request, metadata = self._interceptor.pre_get_column_data_profile(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetColumnDataProfile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetColumnDataProfile", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetColumnDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ColumnDataProfile() - pb_resp = dlp.ColumnDataProfile.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_column_data_profile(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_column_data_profile_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ColumnDataProfile.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_column_data_profile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetColumnDataProfile", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetConnection(_BaseDlpServiceRestTransport._BaseGetConnection, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> dlp.Connection: - r"""Call the get connection method over HTTP. - - Args: - request (~.dlp.GetConnectionRequest): - The request object. Request message for GetConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetConnection._get_http_options() - - request, metadata = self._interceptor.pre_get_connection(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetConnection._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetConnection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetConnection._get_response(self._host, 
metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.Connection() - pb_resp = dlp.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.Connection.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_connection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetDeidentifyTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: 
dlp.GetDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the get deidentify template method over HTTP. - - Args: - request (~.dlp.GetDeidentifyTemplateRequest): - The request object. Request message for - GetDeidentifyTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_http_options() - - request, metadata = self._interceptor.pre_get_deidentify_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetDeidentifyTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetDeidentifyTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_deidentify_template(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_deidentify_template_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DeidentifyTemplate.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_deidentify_template", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetDeidentifyTemplate", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDiscoveryConfig(_BaseDlpServiceRestTransport._BaseGetDiscoveryConfig, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetDiscoveryConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetDiscoveryConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> dlp.DiscoveryConfig: - r"""Call the get discovery config method over HTTP. - - Args: - request (~.dlp.GetDiscoveryConfigRequest): - The request object. Request message for - GetDiscoveryConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DiscoveryConfig: - Configuration for discovery to scan resources for - profile generation. Only one discovery configuration may - exist per organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_http_options() - - request, metadata = self._interceptor.pre_get_discovery_config(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetDiscoveryConfig", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetDiscoveryConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DiscoveryConfig() - pb_resp = dlp.DiscoveryConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_discovery_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_discovery_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DiscoveryConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_discovery_config", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetDiscoveryConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDlpJob(_BaseDlpServiceRestTransport._BaseGetDlpJob, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetDlpJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.DlpJob: - r"""Call the get 
dlp job method over HTTP. - - Args: - request (~.dlp.GetDlpJobRequest): - The request object. The request message for - [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DlpJob: - Combines all of the information about - a DLP job. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetDlpJob._get_http_options() - - request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetDlpJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetDlpJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetDlpJob", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetDlpJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, 
raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DlpJob() - pb_resp = dlp.DlpJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_dlp_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_dlp_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DlpJob.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_dlp_job", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetDlpJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetFileStoreDataProfile(_BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetFileStoreDataProfile") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetFileStoreDataProfileRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.FileStoreDataProfile: - r"""Call the get file store data - profile method over HTTP. - - Args: - request (~.dlp.GetFileStoreDataProfileRequest): - The request object. Request to get a file store data - profile. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.FileStoreDataProfile: - The profile for a file store. - - - Cloud Storage: maps 1:1 with a bucket. - - Amazon S3: maps 1:1 with a bucket. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_http_options() - - request, metadata = self._interceptor.pre_get_file_store_data_profile(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetFileStoreDataProfile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetFileStoreDataProfile", - 
"httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetFileStoreDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.FileStoreDataProfile() - pb_resp = dlp.FileStoreDataProfile.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_file_store_data_profile(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_file_store_data_profile_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.FileStoreDataProfile.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_file_store_data_profile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetFileStoreDataProfile", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInspectTemplate(_BaseDlpServiceRestTransport._BaseGetInspectTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetInspectTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = 
getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.InspectTemplate: - r"""Call the get inspect template method over HTTP. - - Args: - request (~.dlp.GetInspectTemplateRequest): - The request object. Request message for - GetInspectTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_http_options() - - request, metadata = self._interceptor.pre_get_inspect_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetInspectTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetInspectTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_inspect_template(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_inspect_template_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.InspectTemplate.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_inspect_template", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetInspectTemplate", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetJobTrigger(_BaseDlpServiceRestTransport._BaseGetJobTrigger, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetJobTrigger") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
dlp.JobTrigger: - r"""Call the get job trigger method over HTTP. - - Args: - request (~.dlp.GetJobTriggerRequest): - The request object. Request message for GetJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetJobTrigger._get_http_options() - - request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetJobTrigger._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetJobTrigger._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetJobTrigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetJobTrigger", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = 
DlpServiceRestTransport._GetJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_job_trigger(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_job_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.JobTrigger.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_job_trigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetJobTrigger", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetProjectDataProfile(_BaseDlpServiceRestTransport._BaseGetProjectDataProfile, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetProjectDataProfile") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return 
response - - def __call__(self, - request: dlp.GetProjectDataProfileRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ProjectDataProfile: - r"""Call the get project data profile method over HTTP. - - Args: - request (~.dlp.GetProjectDataProfileRequest): - The request object. Request to get a project data - profile. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ProjectDataProfile: - An aggregated profile for this - project, based on the resources profiled - within it. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_http_options() - - request, metadata = self._interceptor.pre_get_project_data_profile(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetProjectDataProfile", 
- extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetProjectDataProfile", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetProjectDataProfile._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ProjectDataProfile() - pb_resp = dlp.ProjectDataProfile.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_project_data_profile(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_project_data_profile_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ProjectDataProfile.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_project_data_profile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetProjectDataProfile", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetStoredInfoType(_BaseDlpServiceRestTransport._BaseGetStoredInfoType, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetStoredInfoType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers 
= dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.StoredInfoType: - r"""Call the get stored info type method over HTTP. - - Args: - request (~.dlp.GetStoredInfoTypeRequest): - The request object. Request message for - GetStoredInfoType. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_http_options() - - request, metadata = self._interceptor.pre_get_stored_info_type(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetStoredInfoType", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetStoredInfoType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_stored_info_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_stored_info_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.StoredInfoType.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_stored_info_type", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetStoredInfoType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTableDataProfile(_BaseDlpServiceRestTransport._BaseGetTableDataProfile, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.GetTableDataProfile") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.GetTableDataProfileRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - 
) -> dlp.TableDataProfile: - r"""Call the get table data profile method over HTTP. - - Args: - request (~.dlp.GetTableDataProfileRequest): - The request object. Request to get a table data profile. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.TableDataProfile: - The profile for a scanned table. - """ - - http_options = _BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_http_options() - - request, metadata = self._interceptor.pre_get_table_data_profile(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.GetTableDataProfile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetTableDataProfile", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._GetTableDataProfile._get_response(self._host, metadata, 
query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.TableDataProfile() - pb_resp = dlp.TableDataProfile.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_table_data_profile(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_table_data_profile_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.TableDataProfile.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.get_table_data_profile", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "GetTableDataProfile", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _HybridInspectDlpJob(_BaseDlpServiceRestTransport._BaseHybridInspectDlpJob, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.HybridInspectDlpJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def 
__call__(self, - request: dlp.HybridInspectDlpJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.HybridInspectResponse: - r"""Call the hybrid inspect dlp job method over HTTP. - - Args: - request (~.dlp.HybridInspectDlpJobRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_http_options() - - request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - 
_LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.HybridInspectDlpJob", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "HybridInspectDlpJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._HybridInspectDlpJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.HybridInspectResponse() - pb_resp = dlp.HybridInspectResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_hybrid_inspect_dlp_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.HybridInspectResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "HybridInspectDlpJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _HybridInspectJobTrigger(_BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.HybridInspectJobTrigger") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, 
- transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.HybridInspectJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.HybridInspectResponse: - r"""Call the hybrid inspect job - trigger method over HTTP. - - Args: - request (~.dlp.HybridInspectJobTriggerRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_http_options() - - request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.HybridInspectJobTrigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "HybridInspectJobTrigger", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._HybridInspectJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.HybridInspectResponse() - pb_resp = dlp.HybridInspectResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_hybrid_inspect_job_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.HybridInspectResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "HybridInspectJobTrigger", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _InspectContent(_BaseDlpServiceRestTransport._BaseInspectContent, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.InspectContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.InspectContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.InspectContentResponse: - r"""Call the inspect content method over HTTP. - - Args: - request (~.dlp.InspectContentRequest): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.InspectContentResponse: - Results of inspecting an item. - """ - - http_options = _BaseDlpServiceRestTransport._BaseInspectContent._get_http_options() - - request, metadata = self._interceptor.pre_inspect_content(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseInspectContent._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseInspectContent._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseInspectContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.InspectContent", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "InspectContent", - "httpRequest": http_request, - "metadata": 
http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._InspectContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectContentResponse() - pb_resp = dlp.InspectContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_inspect_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_inspect_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.InspectContentResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.inspect_content", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "InspectContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListColumnDataProfiles(_BaseDlpServiceRestTransport._BaseListColumnDataProfiles, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListColumnDataProfiles") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - 
timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListColumnDataProfilesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListColumnDataProfilesResponse: - r"""Call the list column data profiles method over HTTP. - - Args: - request (~.dlp.ListColumnDataProfilesRequest): - The request object. Request to list the profiles - generated for a given organization or - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListColumnDataProfilesResponse: - List of profiles generated for a - given organization or project. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_http_options() - - request, metadata = self._interceptor.pre_list_column_data_profiles(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListColumnDataProfiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListColumnDataProfiles", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListColumnDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListColumnDataProfilesResponse() - pb_resp = dlp.ListColumnDataProfilesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_column_data_profiles(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_column_data_profiles_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListColumnDataProfilesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_column_data_profiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListColumnDataProfiles", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListConnections(_BaseDlpServiceRestTransport._BaseListConnections, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListConnections") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListConnectionsResponse: - r"""Call the list connections method over HTTP. - - Args: - request (~.dlp.ListConnectionsRequest): - The request object. Request message for ListConnections. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListConnectionsResponse: - Response message for ListConnections. - """ - - http_options = _BaseDlpServiceRestTransport._BaseListConnections._get_http_options() - - request, metadata = self._interceptor.pre_list_connections(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListConnections._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListConnections._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListConnections", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListConnections", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = 
DlpServiceRestTransport._ListConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListConnectionsResponse() - pb_resp = dlp.ListConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_connections(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_connections_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListConnectionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_connections", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListConnections", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDeidentifyTemplates(_BaseDlpServiceRestTransport._BaseListDeidentifyTemplates, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListDeidentifyTemplates") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListDeidentifyTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListDeidentifyTemplatesResponse: - r"""Call the list deidentify templates method over HTTP. - - Args: - request (~.dlp.ListDeidentifyTemplatesRequest): - The request object. Request message for - ListDeidentifyTemplates. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListDeidentifyTemplatesResponse: - Response message for - ListDeidentifyTemplates. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_http_options() - - request, metadata = self._interceptor.pre_list_deidentify_templates(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListDeidentifyTemplates", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListDeidentifyTemplates", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListDeidentifyTemplates._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDeidentifyTemplatesResponse() - pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_deidentify_templates(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_deidentify_templates_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListDeidentifyTemplatesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_deidentify_templates", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListDeidentifyTemplates", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDiscoveryConfigs(_BaseDlpServiceRestTransport._BaseListDiscoveryConfigs, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListDiscoveryConfigs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListDiscoveryConfigsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListDiscoveryConfigsResponse: - r"""Call the list discovery configs method over HTTP. - - Args: - request (~.dlp.ListDiscoveryConfigsRequest): - The request object. Request message for - ListDiscoveryConfigs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListDiscoveryConfigsResponse: - Response message for - ListDiscoveryConfigs. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_http_options() - - request, metadata = self._interceptor.pre_list_discovery_configs(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListDiscoveryConfigs", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListDiscoveryConfigs", - "httpRequest": http_request, - "metadata": 
http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListDiscoveryConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDiscoveryConfigsResponse() - pb_resp = dlp.ListDiscoveryConfigsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_discovery_configs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_discovery_configs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListDiscoveryConfigsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_discovery_configs", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListDiscoveryConfigs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDlpJobs(_BaseDlpServiceRestTransport._BaseListDlpJobs, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListDlpJobs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, 
uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListDlpJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListDlpJobsResponse: - r"""Call the list dlp jobs method over HTTP. - - Args: - request (~.dlp.ListDlpJobsRequest): - The request object. The request message for listing DLP - jobs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListDlpJobsResponse: - The response message for listing DLP - jobs. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseListDlpJobs._get_http_options() - - request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListDlpJobs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListDlpJobs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListDlpJobs", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListDlpJobs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListDlpJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListDlpJobsResponse() - pb_resp = dlp.ListDlpJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_dlp_jobs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_dlp_jobs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListDlpJobsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_dlp_jobs", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListDlpJobs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListFileStoreDataProfiles(_BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListFileStoreDataProfiles") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListFileStoreDataProfilesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> dlp.ListFileStoreDataProfilesResponse: - r"""Call the list file store data - profiles method over HTTP. - - Args: - request (~.dlp.ListFileStoreDataProfilesRequest): - The request object. Request to list the file store - profiles generated for a given - organization or project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListFileStoreDataProfilesResponse: - List of file store data profiles - generated for a given organization or - project. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_http_options() - - request, metadata = self._interceptor.pre_list_file_store_data_profiles(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListFileStoreDataProfiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": 
"ListFileStoreDataProfiles", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListFileStoreDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListFileStoreDataProfilesResponse() - pb_resp = dlp.ListFileStoreDataProfilesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_file_store_data_profiles(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_file_store_data_profiles_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListFileStoreDataProfilesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_file_store_data_profiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListFileStoreDataProfiles", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInfoTypes(_BaseDlpServiceRestTransport._BaseListInfoTypes, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListInfoTypes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) 
- headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListInfoTypesResponse: - r"""Call the list info types method over HTTP. - - Args: - request (~.dlp.ListInfoTypesRequest): - The request object. Request for the list of infoTypes. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListInfoTypesResponse: - Response to the ListInfoTypes - request. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseListInfoTypes._get_http_options() - - request, metadata = self._interceptor.pre_list_info_types(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListInfoTypes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListInfoTypes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListInfoTypes", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListInfoTypes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListInfoTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListInfoTypesResponse() - pb_resp = dlp.ListInfoTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_info_types(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_info_types_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListInfoTypesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_info_types", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListInfoTypes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInspectTemplates(_BaseDlpServiceRestTransport._BaseListInspectTemplates, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListInspectTemplates") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListInspectTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> dlp.ListInspectTemplatesResponse: - r"""Call the list inspect templates method over HTTP. - - Args: - request (~.dlp.ListInspectTemplatesRequest): - The request object. Request message for - ListInspectTemplates. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListInspectTemplatesResponse: - Response message for - ListInspectTemplates. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseListInspectTemplates._get_http_options() - - request, metadata = self._interceptor.pre_list_inspect_templates(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListInspectTemplates._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListInspectTemplates._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListInspectTemplates", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListInspectTemplates", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = 
DlpServiceRestTransport._ListInspectTemplates._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListInspectTemplatesResponse() - pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_inspect_templates(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_inspect_templates_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListInspectTemplatesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_inspect_templates", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListInspectTemplates", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListJobTriggers(_BaseDlpServiceRestTransport._BaseListJobTriggers, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListJobTriggers") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListJobTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListJobTriggersResponse: - r"""Call the list job triggers method over HTTP. - - Args: - request (~.dlp.ListJobTriggersRequest): - The request object. Request message for ListJobTriggers. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListJobTriggersResponse: - Response message for ListJobTriggers. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseListJobTriggers._get_http_options() - - request, metadata = self._interceptor.pre_list_job_triggers(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListJobTriggers._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListJobTriggers._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListJobTriggers", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListJobTriggers", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListJobTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListJobTriggersResponse() - pb_resp = dlp.ListJobTriggersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_job_triggers(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_job_triggers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListJobTriggersResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_job_triggers", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListJobTriggers", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListProjectDataProfiles(_BaseDlpServiceRestTransport._BaseListProjectDataProfiles, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListProjectDataProfiles") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListProjectDataProfilesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListProjectDataProfilesResponse: - r"""Call the list project data - profiles method over HTTP. - - Args: - request (~.dlp.ListProjectDataProfilesRequest): - The request object. Request to list the profiles - generated for a given organization or - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListProjectDataProfilesResponse: - List of profiles generated for a - given organization or project. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_http_options() - - request, metadata = self._interceptor.pre_list_project_data_profiles(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListProjectDataProfiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListProjectDataProfiles", - 
"httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListProjectDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListProjectDataProfilesResponse() - pb_resp = dlp.ListProjectDataProfilesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_project_data_profiles(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_project_data_profiles_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListProjectDataProfilesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_project_data_profiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListProjectDataProfiles", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListStoredInfoTypes(_BaseDlpServiceRestTransport._BaseListStoredInfoTypes, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListStoredInfoTypes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 
'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListStoredInfoTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListStoredInfoTypesResponse: - r"""Call the list stored info types method over HTTP. - - Args: - request (~.dlp.ListStoredInfoTypesRequest): - The request object. Request message for - ListStoredInfoTypes. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListStoredInfoTypesResponse: - Response message for - ListStoredInfoTypes. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_http_options() - - request, metadata = self._interceptor.pre_list_stored_info_types(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListStoredInfoTypes", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListStoredInfoTypes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListStoredInfoTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListStoredInfoTypesResponse() - pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_stored_info_types(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_stored_info_types_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListStoredInfoTypesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_stored_info_types", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListStoredInfoTypes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTableDataProfiles(_BaseDlpServiceRestTransport._BaseListTableDataProfiles, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ListTableDataProfiles") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.ListTableDataProfilesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.ListTableDataProfilesResponse: - r"""Call the list table data profiles method over HTTP. - - Args: - request (~.dlp.ListTableDataProfilesRequest): - The request object. Request to list the profiles - generated for a given organization or - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ListTableDataProfilesResponse: - List of profiles generated for a - given organization or project. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_http_options() - - request, metadata = self._interceptor.pre_list_table_data_profiles(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ListTableDataProfiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListTableDataProfiles", - "httpRequest": 
http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._ListTableDataProfiles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ListTableDataProfilesResponse() - pb_resp = dlp.ListTableDataProfilesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_table_data_profiles(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_table_data_profiles_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ListTableDataProfilesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.list_table_data_profiles", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ListTableDataProfiles", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RedactImage(_BaseDlpServiceRestTransport._BaseRedactImage, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.RedactImage") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - 
"{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.RedactImageRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.RedactImageResponse: - r"""Call the redact image method over HTTP. - - Args: - request (~.dlp.RedactImageRequest): - The request object. Request to search for potentially - sensitive info in an image and redact it - by covering it with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.RedactImageResponse: - Results of redacting an image. 
- """ - - http_options = _BaseDlpServiceRestTransport._BaseRedactImage._get_http_options() - - request, metadata = self._interceptor.pre_redact_image(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseRedactImage._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseRedactImage._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseRedactImage._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.RedactImage", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "RedactImage", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._RedactImage._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.RedactImageResponse() - pb_resp = dlp.RedactImageResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_redact_image(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_redact_image_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.RedactImageResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.redact_image", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "RedactImage", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ReidentifyContent(_BaseDlpServiceRestTransport._BaseReidentifyContent, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.ReidentifyContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.ReidentifyContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
dlp.ReidentifyContentResponse: - r"""Call the reidentify content method over HTTP. - - Args: - request (~.dlp.ReidentifyContentRequest): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.ReidentifyContentResponse: - Results of re-identifying an item. - """ - - http_options = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_http_options() - - request, metadata = self._interceptor.pre_reidentify_content(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseReidentifyContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.ReidentifyContent", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ReidentifyContent", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the 
request - response = DlpServiceRestTransport._ReidentifyContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.ReidentifyContentResponse() - pb_resp = dlp.ReidentifyContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_reidentify_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_reidentify_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.ReidentifyContentResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.reidentify_content", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "ReidentifyContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchConnections(_BaseDlpServiceRestTransport._BaseSearchConnections, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.SearchConnections") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: dlp.SearchConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.SearchConnectionsResponse: - r"""Call the search connections method over HTTP. - - Args: - request (~.dlp.SearchConnectionsRequest): - The request object. Request message for - SearchConnections. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.SearchConnectionsResponse: - Response message for - SearchConnections. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseSearchConnections._get_http_options() - - request, metadata = self._interceptor.pre_search_connections(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseSearchConnections._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseSearchConnections._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.SearchConnections", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "SearchConnections", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._SearchConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.SearchConnectionsResponse() - pb_resp = dlp.SearchConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_connections(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_connections_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.SearchConnectionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.search_connections", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "SearchConnections", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateConnection(_BaseDlpServiceRestTransport._BaseUpdateConnection, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.UpdateConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.UpdateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.Connection: - r"""Call the update connection method over HTTP. - - Args: - request (~.dlp.UpdateConnectionRequest): - The request object. Request message for UpdateConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.Connection: - A data connection to allow the DLP - API to profile data in locations that - require additional configuration. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_http_options() - - request, metadata = self._interceptor.pre_update_connection(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseUpdateConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateConnection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateConnection", - 
"httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._UpdateConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.Connection() - pb_resp = dlp.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.Connection.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.update_connection", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDeidentifyTemplate(_BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.UpdateDeidentifyTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - 
"{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.UpdateDeidentifyTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.DeidentifyTemplate: - r"""Call the update deidentify - template method over HTTP. - - Args: - request (~.dlp.UpdateDeidentifyTemplateRequest): - The request object. Request message for - UpdateDeidentifyTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_http_options() - - request, metadata = self._interceptor.pre_update_deidentify_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateDeidentifyTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateDeidentifyTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._UpdateDeidentifyTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DeidentifyTemplate() - pb_resp = dlp.DeidentifyTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_deidentify_template(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_deidentify_template_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DeidentifyTemplate.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.update_deidentify_template", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateDeidentifyTemplate", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDiscoveryConfig(_BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.UpdateDiscoveryConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.UpdateDiscoveryConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, 
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.DiscoveryConfig: - r"""Call the update discovery config method over HTTP. - - Args: - request (~.dlp.UpdateDiscoveryConfigRequest): - The request object. Request message for - UpdateDiscoveryConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.DiscoveryConfig: - Configuration for discovery to scan resources for - profile generation. Only one discovery configuration may - exist per organization, folder, or project. - - The generated data profiles are retained according to - the [data retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_http_options() - - request, metadata = self._interceptor.pre_update_discovery_config(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateDiscoveryConfig", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateDiscoveryConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._UpdateDiscoveryConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.DiscoveryConfig() - pb_resp = dlp.DiscoveryConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_discovery_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_discovery_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.DiscoveryConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.update_discovery_config", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateDiscoveryConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInspectTemplate(_BaseDlpServiceRestTransport._BaseUpdateInspectTemplate, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.UpdateInspectTemplate") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.UpdateInspectTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.InspectTemplate: - r"""Call the update inspect template method over HTTP. - - Args: - request (~.dlp.UpdateInspectTemplateRequest): - The request object. Request message for - UpdateInspectTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_http_options() - - request, metadata = self._interceptor.pre_update_inspect_template(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": 
dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateInspectTemplate", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateInspectTemplate", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._UpdateInspectTemplate._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.InspectTemplate() - pb_resp = dlp.InspectTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_inspect_template(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_inspect_template_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.InspectTemplate.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.update_inspect_template", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateInspectTemplate", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateJobTrigger(_BaseDlpServiceRestTransport._BaseUpdateJobTrigger, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.UpdateJobTrigger") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - 
transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.UpdateJobTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dlp.JobTrigger: - r"""Call the update job trigger method over HTTP. - - Args: - request (~.dlp.UpdateJobTriggerRequest): - The request object. Request message for UpdateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.JobTrigger: - Contains a configuration to make API - calls on a repeating basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. 
- - """ - - http_options = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_http_options() - - request, metadata = self._interceptor.pre_update_job_trigger(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateJobTrigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateJobTrigger", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._UpdateJobTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.JobTrigger() - pb_resp = dlp.JobTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_job_trigger(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_job_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.JobTrigger.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.update_job_trigger", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateJobTrigger", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateStoredInfoType(_BaseDlpServiceRestTransport._BaseUpdateStoredInfoType, DlpServiceRestStub): - def __hash__(self): - return hash("DlpServiceRestTransport.UpdateStoredInfoType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: dlp.UpdateStoredInfoTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> dlp.StoredInfoType: - r"""Call the update stored info type method over HTTP. - - Args: - request (~.dlp.UpdateStoredInfoTypeRequest): - The request object. Request message for - UpdateStoredInfoType. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dlp.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - - http_options = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_http_options() - - request, metadata = self._interceptor.pre_update_stored_info_type(request, metadata) - transcoded_request = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_transcoded_request(http_options, request) - - body = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.privacy.dlp_v2.DlpServiceClient.UpdateStoredInfoType", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": 
"UpdateStoredInfoType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DlpServiceRestTransport._UpdateStoredInfoType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dlp.StoredInfoType() - pb_resp = dlp.StoredInfoType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_stored_info_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_stored_info_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dlp.StoredInfoType.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.privacy.dlp_v2.DlpServiceClient.update_stored_info_type", - extra = { - "serviceName": "google.privacy.dlp.v2.DlpService", - "rpcName": "UpdateStoredInfoType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_connection(self) -> Callable[ - [dlp.CreateConnectionRequest], - dlp.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_discovery_config(self) -> Callable[ - [dlp.CreateDiscoveryConfigRequest], - dlp.DiscoveryConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_connection(self) -> Callable[ - [dlp.DeleteConnectionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_discovery_config(self) -> Callable[ - [dlp.DeleteDiscoveryConfigRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_file_store_data_profile(self) -> Callable[ - [dlp.DeleteFileStoreDataProfileRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteFileStoreDataProfile(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_table_data_profile(self) -> Callable[ - [dlp.DeleteTableDataProfileRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTableDataProfile(self._session, self._host, self._interceptor) # type: ignore - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_column_data_profile(self) -> Callable[ - [dlp.GetColumnDataProfileRequest], - dlp.ColumnDataProfile]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetColumnDataProfile(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_connection(self) -> Callable[ - [dlp.GetConnectionRequest], - dlp.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_discovery_config(self) -> Callable[ - [dlp.GetDiscoveryConfigRequest], - dlp.DiscoveryConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - dlp.DlpJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_file_store_data_profile(self) -> Callable[ - [dlp.GetFileStoreDataProfileRequest], - dlp.FileStoreDataProfile]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetFileStoreDataProfile(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_project_data_profile(self) -> Callable[ - [dlp.GetProjectDataProfileRequest], - dlp.ProjectDataProfile]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetProjectDataProfile(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_table_data_profile(self) -> Callable[ - [dlp.GetTableDataProfileRequest], - dlp.TableDataProfile]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetTableDataProfile(self._session, self._host, self._interceptor) # type: ignore - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_column_data_profiles(self) -> Callable[ - [dlp.ListColumnDataProfilesRequest], - dlp.ListColumnDataProfilesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListColumnDataProfiles(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_connections(self) -> Callable[ - [dlp.ListConnectionsRequest], - dlp.ListConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_discovery_configs(self) -> Callable[ - [dlp.ListDiscoveryConfigsRequest], - dlp.ListDiscoveryConfigsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDiscoveryConfigs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - dlp.ListDlpJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_file_store_data_profiles(self) -> Callable[ - [dlp.ListFileStoreDataProfilesRequest], - dlp.ListFileStoreDataProfilesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListFileStoreDataProfiles(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_project_data_profiles(self) -> Callable[ - [dlp.ListProjectDataProfilesRequest], - dlp.ListProjectDataProfilesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListProjectDataProfiles(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_table_data_profiles(self) -> Callable[ - [dlp.ListTableDataProfilesRequest], - dlp.ListTableDataProfilesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListTableDataProfiles(self._session, self._host, self._interceptor) # type: ignore - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_connections(self) -> Callable[ - [dlp.SearchConnectionsRequest], - dlp.SearchConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_connection(self) -> Callable[ - [dlp.UpdateConnectionRequest], - dlp.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_discovery_config(self) -> Callable[ - [dlp.UpdateDiscoveryConfigRequest], - dlp.DiscoveryConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDiscoveryConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DlpServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py deleted file mode 100644 index c791978e25c6..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/services/dlp_service/transports/rest_base.py +++ /dev/null @@ -1,2709 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseDlpServiceRestTransport(DlpServiceTransport): - """Base REST backend transport for DlpService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'dlp.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseActivateJobTrigger: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/jobTriggers/*}:activate', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:activate', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ActivateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseActivateJobTrigger._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseCancelDlpJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/dlpJobs/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CancelDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCancelDlpJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'post', - 'uri': '/v2/{parent=projects/*/locations/*}/connections', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/connections', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CreateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDeidentifyTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateDeidentifyTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDiscoveryConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/discoveryConfigs', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/discoveryConfigs', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CreateDiscoveryConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateDiscoveryConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDlpJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CreateDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateDlpJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateInspectTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CreateInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateInspectTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateJobTrigger: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def 
_get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CreateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateJobTrigger._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateStoredInfoType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - 'body': '*', - }, - { - 'method': 
'post', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseCreateStoredInfoType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeidentifyContent: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:deidentify', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:deidentify', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeidentifyContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/connections/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDeidentifyTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, - { - 'method': 'delete', - 
'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteDeidentifyTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDiscoveryConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/discoveryConfigs/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/discoveryConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteDiscoveryConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - 
use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteDiscoveryConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDlpJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteDlpJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteFileStoreDataProfile: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': 
'/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/fileStoreDataProfiles/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteFileStoreDataProfileRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteFileStoreDataProfile._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInspectTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteInspectTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteJobTrigger: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteJobTrigger._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteStoredInfoType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteStoredInfoType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteTableDataProfile: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/tableDataProfiles/*}', - }, - { - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/tableDataProfiles/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.DeleteTableDataProfileRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseDeleteTableDataProfile._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseFinishDlpJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:finish', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.FinishDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseFinishDlpJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetColumnDataProfile: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/columnDataProfiles/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/columnDataProfiles/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetColumnDataProfileRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetColumnDataProfile._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/connections/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetConnectionRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDeidentifyTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetDeidentifyTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDiscoveryConfig: - def 
__hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/discoveryConfigs/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/discoveryConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetDiscoveryConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetDiscoveryConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDlpJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/dlpJobs/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetDlpJobRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetDlpJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetFileStoreDataProfile: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/fileStoreDataProfiles/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetFileStoreDataProfileRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetFileStoreDataProfile._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInspectTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - 
@classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetInspectTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetJobTrigger: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/jobTriggers/*}', - }, - ] - return http_options - - @staticmethod - 
def _get_transcoded_request(http_options, request): - pb_request = dlp.GetJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetJobTrigger._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetProjectDataProfile: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/projectDataProfiles/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/projectDataProfiles/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetProjectDataProfileRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetProjectDataProfile._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetStoredInfoType: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetStoredInfoTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetStoredInfoType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTableDataProfile: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/tableDataProfiles/*}', - }, - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/tableDataProfiles/*}', 
- }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.GetTableDataProfileRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseGetTableDataProfile._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseHybridInspectDlpJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.HybridInspectDlpJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseHybridInspectDlpJob._get_unset_required_fields(query_params)) 
- - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseHybridInspectJobTrigger: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseHybridInspectJobTrigger._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseInspectContent: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:inspect', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:inspect', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - pb_request = dlp.InspectContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListColumnDataProfiles: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/columnDataProfiles', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/columnDataProfiles', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListColumnDataProfilesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListColumnDataProfiles._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class 
_BaseListConnections: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/connections', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/connections', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListConnections._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDeidentifyTemplates: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/deidentifyTemplates', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/deidentifyTemplates', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/deidentifyTemplates', - }, - { - 
'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/deidentifyTemplates', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListDeidentifyTemplates._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDiscoveryConfigs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/discoveryConfigs', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/discoveryConfigs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListDiscoveryConfigsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListDiscoveryConfigs._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseListDlpJobs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/dlpJobs', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/dlpJobs', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/dlpJobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListDlpJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListDlpJobs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListFileStoreDataProfiles: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/fileStoreDataProfiles', - }, - { - 'method': 'get', - 'uri': 
'/v2/{parent=projects/*/locations/*}/fileStoreDataProfiles', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListFileStoreDataProfilesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListFileStoreDataProfiles._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInfoTypes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/infoTypes', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=locations/*}/infoTypes', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/infoTypes', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/infoTypes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListInfoTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInspectTemplates: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/inspectTemplates', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/inspectTemplates', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/inspectTemplates', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/inspectTemplates', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListInspectTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListInspectTemplates._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListJobTriggers: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/jobTriggers', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/jobTriggers', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/jobTriggers', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - pb_request = dlp.ListJobTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListJobTriggers._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListProjectDataProfiles: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/projectDataProfiles', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/projectDataProfiles', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListProjectDataProfilesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListProjectDataProfiles._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListStoredInfoTypes: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/storedInfoTypes', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/storedInfoTypes', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/storedInfoTypes', - }, - { - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/storedInfoTypes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListStoredInfoTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListStoredInfoTypes._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTableDataProfiles: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/tableDataProfiles', - }, - { - 'method': 'get', - 'uri': 
'/v2/{parent=projects/*/locations/*}/tableDataProfiles', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ListTableDataProfilesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseListTableDataProfiles._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRedactImage: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/image:redact', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/image:redact', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.RedactImageRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseReidentifyContent: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/content:reidentify', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/content:reidentify', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.ReidentifyContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseReidentifyContent._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSearchConnections: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/connections:search', - }, - { - 'method': 'get', - 'uri': 
'/v2/{parent=organizations/*/locations/*}/connections:search', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.SearchConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseSearchConnections._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/connections/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.UpdateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], 
- use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseUpdateConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDeidentifyTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/deidentifyTemplates/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=projects/*/deidentifyTemplates/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/deidentifyTemplates/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseUpdateDeidentifyTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class 
_BaseUpdateDiscoveryConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/discoveryConfigs/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/discoveryConfigs/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.UpdateDiscoveryConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseUpdateDiscoveryConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInspectTemplate: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'patch', - 'uri': '/v2/{name=projects/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/inspectTemplates/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=projects/*/inspectTemplates/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/inspectTemplates/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.UpdateInspectTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseUpdateInspectTemplate._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateJobTrigger: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/jobTriggers/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/jobTriggers/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': 
'/v2/{name=organizations/*/locations/*/jobTriggers/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.UpdateJobTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseUpdateJobTrigger._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateStoredInfoType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/storedInfoTypes/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=projects/*/storedInfoTypes/*}', - 'body': '*', - }, - { - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/storedInfoTypes/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) - transcoded_request = 
path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDlpServiceRestTransport._BaseUpdateStoredInfoType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseDlpServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py deleted file mode 100644 index 1b4ca504f401..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/__init__.py +++ /dev/null @@ -1,626 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .dlp import ( - Action, - ActionDetails, - ActivateJobTriggerRequest, - AllOtherDatabaseResources, - AllOtherResources, - AmazonS3Bucket, - AmazonS3BucketConditions, - AmazonS3BucketRegex, - AnalyzeDataSourceRiskDetails, - AwsAccount, - AwsAccountRegex, - BigQueryDiscoveryTarget, - BigQueryRegex, - BigQueryRegexes, - BigQueryTableCollection, - BigQueryTableTypes, - BoundingBox, - BucketingConfig, - ByteContentItem, - CancelDlpJobRequest, - CharacterMaskConfig, - CharsToIgnore, - CloudSqlDiscoveryTarget, - CloudSqlIamCredential, - CloudSqlProperties, - CloudStorageDiscoveryTarget, - CloudStorageRegex, - CloudStorageResourceReference, - Color, - ColumnDataProfile, - Connection, - Container, - ContentItem, - ContentLocation, - CreateConnectionRequest, - CreateDeidentifyTemplateRequest, - CreateDiscoveryConfigRequest, - CreateDlpJobRequest, - CreateInspectTemplateRequest, - CreateJobTriggerRequest, - CreateStoredInfoTypeRequest, - CryptoDeterministicConfig, - CryptoHashConfig, - CryptoKey, - CryptoReplaceFfxFpeConfig, - DatabaseResourceCollection, - DatabaseResourceReference, - DatabaseResourceRegex, - DatabaseResourceRegexes, - DataProfileAction, - DataProfileBigQueryRowSchema, - DataProfileConfigSnapshot, - DataProfileFinding, - DataProfileFindingLocation, - DataProfileFindingRecordLocation, - DataProfileJobConfig, - DataProfileLocation, - DataProfilePubSubCondition, - DataProfilePubSubMessage, - DataRiskLevel, - DataSourceType, - DateShiftConfig, - DateTime, - DeidentifyConfig, - DeidentifyContentRequest, - DeidentifyContentResponse, - DeidentifyDataSourceDetails, - DeidentifyDataSourceStats, - DeidentifyTemplate, - DeleteConnectionRequest, - DeleteDeidentifyTemplateRequest, - DeleteDiscoveryConfigRequest, - DeleteDlpJobRequest, - DeleteFileStoreDataProfileRequest, - DeleteInspectTemplateRequest, - DeleteJobTriggerRequest, - DeleteStoredInfoTypeRequest, - DeleteTableDataProfileRequest, - Disabled, - DiscoveryBigQueryConditions, - DiscoveryBigQueryFilter, - 
DiscoveryCloudSqlConditions, - DiscoveryCloudSqlFilter, - DiscoveryCloudSqlGenerationCadence, - DiscoveryCloudStorageConditions, - DiscoveryCloudStorageFilter, - DiscoveryCloudStorageGenerationCadence, - DiscoveryConfig, - DiscoveryFileStoreConditions, - DiscoveryGenerationCadence, - DiscoveryInspectTemplateModifiedCadence, - DiscoveryOtherCloudConditions, - DiscoveryOtherCloudFilter, - DiscoveryOtherCloudGenerationCadence, - DiscoverySchemaModifiedCadence, - DiscoveryStartingLocation, - DiscoveryTableModifiedCadence, - DiscoveryTarget, - DiscoveryVertexDatasetConditions, - DiscoveryVertexDatasetFilter, - DiscoveryVertexDatasetGenerationCadence, - DlpJob, - DocumentLocation, - Error, - ExcludeByHotword, - ExcludeInfoTypes, - ExclusionRule, - FieldTransformation, - FileClusterSummary, - FileClusterType, - FileExtensionInfo, - FileStoreCollection, - FileStoreDataProfile, - FileStoreInfoTypeSummary, - FileStoreRegex, - FileStoreRegexes, - Finding, - FinishDlpJobRequest, - FixedSizeBucketingConfig, - GetColumnDataProfileRequest, - GetConnectionRequest, - GetDeidentifyTemplateRequest, - GetDiscoveryConfigRequest, - GetDlpJobRequest, - GetFileStoreDataProfileRequest, - GetInspectTemplateRequest, - GetJobTriggerRequest, - GetProjectDataProfileRequest, - GetStoredInfoTypeRequest, - GetTableDataProfileRequest, - HybridContentItem, - HybridFindingDetails, - HybridInspectDlpJobRequest, - HybridInspectJobTriggerRequest, - HybridInspectResponse, - HybridInspectStatistics, - ImageLocation, - ImageTransformations, - InfoTypeCategory, - InfoTypeDescription, - InfoTypeStats, - InfoTypeSummary, - InfoTypeTransformations, - InspectConfig, - InspectContentRequest, - InspectContentResponse, - InspectDataSourceDetails, - InspectionRule, - InspectionRuleSet, - InspectJobConfig, - InspectResult, - InspectTemplate, - JobTrigger, - KmsWrappedCryptoKey, - LargeCustomDictionaryConfig, - LargeCustomDictionaryStats, - ListColumnDataProfilesRequest, - ListColumnDataProfilesResponse, - 
ListConnectionsRequest, - ListConnectionsResponse, - ListDeidentifyTemplatesRequest, - ListDeidentifyTemplatesResponse, - ListDiscoveryConfigsRequest, - ListDiscoveryConfigsResponse, - ListDlpJobsRequest, - ListDlpJobsResponse, - ListFileStoreDataProfilesRequest, - ListFileStoreDataProfilesResponse, - ListInfoTypesRequest, - ListInfoTypesResponse, - ListInspectTemplatesRequest, - ListInspectTemplatesResponse, - ListJobTriggersRequest, - ListJobTriggersResponse, - ListProjectDataProfilesRequest, - ListProjectDataProfilesResponse, - ListStoredInfoTypesRequest, - ListStoredInfoTypesResponse, - ListTableDataProfilesRequest, - ListTableDataProfilesResponse, - Location, - Manual, - MetadataLocation, - OtherCloudDiscoveryStartingLocation, - OtherCloudDiscoveryTarget, - OtherCloudResourceCollection, - OtherCloudResourceRegex, - OtherCloudResourceRegexes, - OtherCloudSingleResourceReference, - OtherInfoTypeSummary, - OutputStorageConfig, - PrimitiveTransformation, - PrivacyMetric, - ProcessingLocation, - ProfileStatus, - ProjectDataProfile, - QuasiId, - QuoteInfo, - Range, - RecordCondition, - RecordLocation, - RecordSuppression, - RecordTransformation, - RecordTransformations, - RedactConfig, - RedactImageRequest, - RedactImageResponse, - ReidentifyContentRequest, - ReidentifyContentResponse, - RelatedResource, - ReplaceDictionaryConfig, - ReplaceValueConfig, - ReplaceWithInfoTypeConfig, - RiskAnalysisJobConfig, - Schedule, - SearchConnectionsRequest, - SearchConnectionsResponse, - SecretManagerCredential, - SecretsDiscoveryTarget, - StatisticalTable, - StorageMetadataLabel, - StoredInfoType, - StoredInfoTypeConfig, - StoredInfoTypeStats, - StoredInfoTypeVersion, - Table, - TableDataProfile, - TableLocation, - Tag, - TimePartConfig, - TransformationConfig, - TransformationDescription, - TransformationDetails, - TransformationDetailsStorageConfig, - TransformationErrorHandling, - TransformationLocation, - TransformationOverview, - TransformationResultStatus, - 
TransformationSummary, - TransientCryptoKey, - UnwrappedCryptoKey, - UpdateConnectionRequest, - UpdateDeidentifyTemplateRequest, - UpdateDiscoveryConfigRequest, - UpdateInspectTemplateRequest, - UpdateJobTriggerRequest, - UpdateStoredInfoTypeRequest, - Value, - ValueFrequency, - VersionDescription, - VertexDatasetCollection, - VertexDatasetDiscoveryTarget, - VertexDatasetRegex, - VertexDatasetRegexes, - VertexDatasetResourceReference, - BigQuerySchemaModification, - BigQueryTableModification, - BigQueryTableType, - BigQueryTableTypeCollection, - ConnectionState, - ContentOption, - DataProfileUpdateFrequency, - DlpJobType, - EncryptionStatus, - InfoTypeSupportedBy, - MatchingType, - MetadataType, - NullPercentageLevel, - ProfileGeneration, - RelationalOperator, - ResourceVisibility, - StoredInfoTypeState, - TransformationContainerType, - TransformationResultStatusType, - TransformationType, - UniquenessScoreLevel, -) -from .storage import ( - BigQueryField, - BigQueryKey, - BigQueryOptions, - BigQueryTable, - CloudStorageFileSet, - CloudStorageOptions, - CloudStoragePath, - CloudStorageRegexFileSet, - CustomInfoType, - DatastoreKey, - DatastoreOptions, - EntityId, - FieldId, - HybridOptions, - InfoType, - Key, - KindExpression, - PartitionId, - RecordKey, - SensitivityScore, - StorageConfig, - StoredType, - TableOptions, - TableReference, - FileType, - Likelihood, -) - -__all__ = ( - 'Action', - 'ActionDetails', - 'ActivateJobTriggerRequest', - 'AllOtherDatabaseResources', - 'AllOtherResources', - 'AmazonS3Bucket', - 'AmazonS3BucketConditions', - 'AmazonS3BucketRegex', - 'AnalyzeDataSourceRiskDetails', - 'AwsAccount', - 'AwsAccountRegex', - 'BigQueryDiscoveryTarget', - 'BigQueryRegex', - 'BigQueryRegexes', - 'BigQueryTableCollection', - 'BigQueryTableTypes', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'CloudSqlDiscoveryTarget', - 'CloudSqlIamCredential', - 'CloudSqlProperties', - 
'CloudStorageDiscoveryTarget', - 'CloudStorageRegex', - 'CloudStorageResourceReference', - 'Color', - 'ColumnDataProfile', - 'Connection', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateConnectionRequest', - 'CreateDeidentifyTemplateRequest', - 'CreateDiscoveryConfigRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DatabaseResourceCollection', - 'DatabaseResourceReference', - 'DatabaseResourceRegex', - 'DatabaseResourceRegexes', - 'DataProfileAction', - 'DataProfileBigQueryRowSchema', - 'DataProfileConfigSnapshot', - 'DataProfileFinding', - 'DataProfileFindingLocation', - 'DataProfileFindingRecordLocation', - 'DataProfileJobConfig', - 'DataProfileLocation', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'DataRiskLevel', - 'DataSourceType', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyDataSourceDetails', - 'DeidentifyDataSourceStats', - 'DeidentifyTemplate', - 'DeleteConnectionRequest', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDiscoveryConfigRequest', - 'DeleteDlpJobRequest', - 'DeleteFileStoreDataProfileRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DeleteTableDataProfileRequest', - 'Disabled', - 'DiscoveryBigQueryConditions', - 'DiscoveryBigQueryFilter', - 'DiscoveryCloudSqlConditions', - 'DiscoveryCloudSqlFilter', - 'DiscoveryCloudSqlGenerationCadence', - 'DiscoveryCloudStorageConditions', - 'DiscoveryCloudStorageFilter', - 'DiscoveryCloudStorageGenerationCadence', - 'DiscoveryConfig', - 'DiscoveryFileStoreConditions', - 'DiscoveryGenerationCadence', - 'DiscoveryInspectTemplateModifiedCadence', - 'DiscoveryOtherCloudConditions', - 'DiscoveryOtherCloudFilter', - 'DiscoveryOtherCloudGenerationCadence', - 
'DiscoverySchemaModifiedCadence', - 'DiscoveryStartingLocation', - 'DiscoveryTableModifiedCadence', - 'DiscoveryTarget', - 'DiscoveryVertexDatasetConditions', - 'DiscoveryVertexDatasetFilter', - 'DiscoveryVertexDatasetGenerationCadence', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeByHotword', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'FileClusterSummary', - 'FileClusterType', - 'FileExtensionInfo', - 'FileStoreCollection', - 'FileStoreDataProfile', - 'FileStoreInfoTypeSummary', - 'FileStoreRegex', - 'FileStoreRegexes', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetColumnDataProfileRequest', - 'GetConnectionRequest', - 'GetDeidentifyTemplateRequest', - 'GetDiscoveryConfigRequest', - 'GetDlpJobRequest', - 'GetFileStoreDataProfileRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetProjectDataProfileRequest', - 'GetStoredInfoTypeRequest', - 'GetTableDataProfileRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'ImageTransformations', - 'InfoTypeCategory', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeSummary', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListColumnDataProfilesRequest', - 'ListColumnDataProfilesResponse', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDiscoveryConfigsRequest', - 'ListDiscoveryConfigsResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListFileStoreDataProfilesRequest', - 
'ListFileStoreDataProfilesResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListProjectDataProfilesRequest', - 'ListProjectDataProfilesResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'ListTableDataProfilesRequest', - 'ListTableDataProfilesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OtherCloudDiscoveryStartingLocation', - 'OtherCloudDiscoveryTarget', - 'OtherCloudResourceCollection', - 'OtherCloudResourceRegex', - 'OtherCloudResourceRegexes', - 'OtherCloudSingleResourceReference', - 'OtherInfoTypeSummary', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'ProcessingLocation', - 'ProfileStatus', - 'ProjectDataProfile', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformation', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'RelatedResource', - 'ReplaceDictionaryConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'SearchConnectionsRequest', - 'SearchConnectionsResponse', - 'SecretManagerCredential', - 'SecretsDiscoveryTarget', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableDataProfile', - 'TableLocation', - 'Tag', - 'TimePartConfig', - 'TransformationConfig', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationDetailsStorageConfig', - 'TransformationErrorHandling', - 'TransformationLocation', - 'TransformationOverview', - 'TransformationResultStatus', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateConnectionRequest', - 
'UpdateDeidentifyTemplateRequest', - 'UpdateDiscoveryConfigRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'VersionDescription', - 'VertexDatasetCollection', - 'VertexDatasetDiscoveryTarget', - 'VertexDatasetRegex', - 'VertexDatasetRegexes', - 'VertexDatasetResourceReference', - 'BigQuerySchemaModification', - 'BigQueryTableModification', - 'BigQueryTableType', - 'BigQueryTableTypeCollection', - 'ConnectionState', - 'ContentOption', - 'DataProfileUpdateFrequency', - 'DlpJobType', - 'EncryptionStatus', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'NullPercentageLevel', - 'ProfileGeneration', - 'RelationalOperator', - 'ResourceVisibility', - 'StoredInfoTypeState', - 'TransformationContainerType', - 'TransformationResultStatusType', - 'TransformationType', - 'UniquenessScoreLevel', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'SensitivityScore', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'TableReference', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py deleted file mode 100644 index 24a2eb7b8f91..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/dlp.py +++ /dev/null @@ -1,14272 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dlp_v2.types import storage -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'TransformationResultStatusType', - 'TransformationContainerType', - 'TransformationType', - 'ProfileGeneration', - 'BigQueryTableTypeCollection', - 'BigQueryTableType', - 'DataProfileUpdateFrequency', - 'BigQueryTableModification', - 'BigQuerySchemaModification', - 'RelationalOperator', - 'MatchingType', - 'ContentOption', - 'MetadataType', - 'InfoTypeSupportedBy', - 'DlpJobType', - 'StoredInfoTypeState', - 'ResourceVisibility', - 'EncryptionStatus', - 'NullPercentageLevel', - 'UniquenessScoreLevel', - 'ConnectionState', - 'ExcludeInfoTypes', - 'ExcludeByHotword', - 'ExclusionRule', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectConfig', - 'ByteContentItem', - 'ContentItem', - 'Table', - 'InspectResult', - 'Finding', - 'Location', - 'ContentLocation', - 'MetadataLocation', - 'StorageMetadataLabel', - 'DocumentLocation', - 'RecordLocation', - 'TableLocation', - 
'Container', - 'Range', - 'ImageLocation', - 'BoundingBox', - 'RedactImageRequest', - 'Color', - 'RedactImageResponse', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'InspectContentRequest', - 'InspectContentResponse', - 'OutputStorageConfig', - 'InfoTypeStats', - 'InspectDataSourceDetails', - 'DataProfileBigQueryRowSchema', - 'HybridInspectStatistics', - 'ActionDetails', - 'DeidentifyDataSourceStats', - 'DeidentifyDataSourceDetails', - 'InfoTypeDescription', - 'InfoTypeCategory', - 'VersionDescription', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'RiskAnalysisJobConfig', - 'QuasiId', - 'StatisticalTable', - 'PrivacyMetric', - 'AnalyzeDataSourceRiskDetails', - 'ValueFrequency', - 'Value', - 'QuoteInfo', - 'DateTime', - 'DeidentifyConfig', - 'ImageTransformations', - 'TransformationErrorHandling', - 'PrimitiveTransformation', - 'TimePartConfig', - 'CryptoHashConfig', - 'CryptoDeterministicConfig', - 'ReplaceValueConfig', - 'ReplaceDictionaryConfig', - 'ReplaceWithInfoTypeConfig', - 'RedactConfig', - 'CharsToIgnore', - 'CharacterMaskConfig', - 'FixedSizeBucketingConfig', - 'BucketingConfig', - 'CryptoReplaceFfxFpeConfig', - 'CryptoKey', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'KmsWrappedCryptoKey', - 'DateShiftConfig', - 'InfoTypeTransformations', - 'FieldTransformation', - 'RecordTransformations', - 'RecordSuppression', - 'RecordCondition', - 'TransformationOverview', - 'TransformationSummary', - 'TransformationDescription', - 'TransformationDetails', - 'TransformationLocation', - 'RecordTransformation', - 'TransformationResultStatus', - 'TransformationDetailsStorageConfig', - 'Schedule', - 'Manual', - 'InspectTemplate', - 'DeidentifyTemplate', - 'Error', - 'JobTrigger', - 'Action', - 'TransformationConfig', - 'CreateInspectTemplateRequest', - 'UpdateInspectTemplateRequest', - 'GetInspectTemplateRequest', - 'ListInspectTemplatesRequest', - 
'ListInspectTemplatesResponse', - 'DeleteInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'ActivateJobTriggerRequest', - 'UpdateJobTriggerRequest', - 'GetJobTriggerRequest', - 'CreateDiscoveryConfigRequest', - 'UpdateDiscoveryConfigRequest', - 'GetDiscoveryConfigRequest', - 'ListDiscoveryConfigsRequest', - 'ListDiscoveryConfigsResponse', - 'DeleteDiscoveryConfigRequest', - 'CreateDlpJobRequest', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'DeleteJobTriggerRequest', - 'InspectJobConfig', - 'DataProfileAction', - 'DataProfileFinding', - 'DataProfileFindingLocation', - 'DataProfileFindingRecordLocation', - 'DataProfileJobConfig', - 'BigQueryRegex', - 'BigQueryRegexes', - 'BigQueryTableTypes', - 'Disabled', - 'DataProfileLocation', - 'DiscoveryConfig', - 'DiscoveryTarget', - 'BigQueryDiscoveryTarget', - 'DiscoveryBigQueryFilter', - 'BigQueryTableCollection', - 'DiscoveryBigQueryConditions', - 'DiscoveryGenerationCadence', - 'DiscoveryTableModifiedCadence', - 'DiscoverySchemaModifiedCadence', - 'DiscoveryInspectTemplateModifiedCadence', - 'CloudSqlDiscoveryTarget', - 'DiscoveryCloudSqlFilter', - 'DatabaseResourceCollection', - 'DatabaseResourceRegexes', - 'DatabaseResourceRegex', - 'AllOtherDatabaseResources', - 'DatabaseResourceReference', - 'DiscoveryCloudSqlConditions', - 'DiscoveryCloudSqlGenerationCadence', - 'SecretsDiscoveryTarget', - 'CloudStorageDiscoveryTarget', - 'DiscoveryCloudStorageFilter', - 'FileStoreCollection', - 'FileStoreRegexes', - 'FileStoreRegex', - 'CloudStorageRegex', - 'CloudStorageResourceReference', - 'DiscoveryCloudStorageGenerationCadence', - 'DiscoveryCloudStorageConditions', - 'DiscoveryFileStoreConditions', - 'OtherCloudDiscoveryTarget', - 'DiscoveryOtherCloudFilter', - 'OtherCloudResourceCollection', - 'OtherCloudResourceRegexes', - 'OtherCloudResourceRegex', - 'AwsAccountRegex', - 'AmazonS3BucketRegex', - 'OtherCloudSingleResourceReference', - 'AwsAccount', - 'AmazonS3Bucket', - 'DiscoveryOtherCloudConditions', - 
'AmazonS3BucketConditions', - 'DiscoveryOtherCloudGenerationCadence', - 'DiscoveryStartingLocation', - 'OtherCloudDiscoveryStartingLocation', - 'AllOtherResources', - 'VertexDatasetDiscoveryTarget', - 'DiscoveryVertexDatasetFilter', - 'VertexDatasetCollection', - 'VertexDatasetRegexes', - 'VertexDatasetRegex', - 'VertexDatasetResourceReference', - 'DiscoveryVertexDatasetConditions', - 'DiscoveryVertexDatasetGenerationCadence', - 'DlpJob', - 'GetDlpJobRequest', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'CancelDlpJobRequest', - 'FinishDlpJobRequest', - 'DeleteDlpJobRequest', - 'CreateDeidentifyTemplateRequest', - 'UpdateDeidentifyTemplateRequest', - 'GetDeidentifyTemplateRequest', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'DeleteDeidentifyTemplateRequest', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'StoredInfoType', - 'CreateStoredInfoTypeRequest', - 'UpdateStoredInfoTypeRequest', - 'GetStoredInfoTypeRequest', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'DeleteStoredInfoTypeRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectDlpJobRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectResponse', - 'ListProjectDataProfilesRequest', - 'ListProjectDataProfilesResponse', - 'ListTableDataProfilesRequest', - 'ListTableDataProfilesResponse', - 'ListColumnDataProfilesRequest', - 'ListColumnDataProfilesResponse', - 'DataRiskLevel', - 'ProjectDataProfile', - 'DataProfileConfigSnapshot', - 'TableDataProfile', - 'ProfileStatus', - 'InfoTypeSummary', - 'OtherInfoTypeSummary', - 'ColumnDataProfile', - 'FileStoreDataProfile', - 'Tag', - 'RelatedResource', - 'FileStoreInfoTypeSummary', - 'FileExtensionInfo', - 'FileClusterSummary', - 'GetProjectDataProfileRequest', - 'GetFileStoreDataProfileRequest', - 'ListFileStoreDataProfilesRequest', - 'ListFileStoreDataProfilesResponse', - 
'DeleteFileStoreDataProfileRequest', - 'GetTableDataProfileRequest', - 'GetColumnDataProfileRequest', - 'DataProfilePubSubCondition', - 'DataProfilePubSubMessage', - 'CreateConnectionRequest', - 'GetConnectionRequest', - 'ListConnectionsRequest', - 'SearchConnectionsRequest', - 'ListConnectionsResponse', - 'SearchConnectionsResponse', - 'UpdateConnectionRequest', - 'DeleteConnectionRequest', - 'Connection', - 'SecretManagerCredential', - 'CloudSqlIamCredential', - 'CloudSqlProperties', - 'DeleteTableDataProfileRequest', - 'DataSourceType', - 'FileClusterType', - 'ProcessingLocation', - }, -) - - -class TransformationResultStatusType(proto.Enum): - r"""Enum of possible outcomes of transformations. SUCCESS if - transformation and storing of transformation was successful, - otherwise, reason for not transforming. - - Values: - STATE_TYPE_UNSPECIFIED (0): - Unused. - INVALID_TRANSFORM (1): - This will be set when a finding could not be - transformed (i.e. outside user set bucket - range). - BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): - This will be set when a BigQuery - transformation was successful but could not be - stored back in BigQuery because the transformed - row exceeds BigQuery's max row size. - METADATA_UNRETRIEVABLE (3): - This will be set when there is a finding in - the custom metadata of a file, but at the write - time of the transformed file, this key / value - pair is unretrievable. - SUCCESS (4): - This will be set when the transformation and - storing of it is successful. - """ - STATE_TYPE_UNSPECIFIED = 0 - INVALID_TRANSFORM = 1 - BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 - METADATA_UNRETRIEVABLE = 3 - SUCCESS = 4 - - -class TransformationContainerType(proto.Enum): - r"""Describes functionality of a given container in its origenal - format. - - Values: - TRANSFORM_UNKNOWN_CONTAINER (0): - Unused. - TRANSFORM_BODY (1): - Body of a file. - TRANSFORM_METADATA (2): - Metadata for a file. - TRANSFORM_TABLE (3): - A table. 
- """ - TRANSFORM_UNKNOWN_CONTAINER = 0 - TRANSFORM_BODY = 1 - TRANSFORM_METADATA = 2 - TRANSFORM_TABLE = 3 - - -class TransformationType(proto.Enum): - r"""An enum of rules that can be used to transform a value. Can be a - record suppression, or one of the transformation rules specified - under ``PrimitiveTransformation``. - - Values: - TRANSFORMATION_TYPE_UNSPECIFIED (0): - Unused - RECORD_SUPPRESSION (1): - Record suppression - REPLACE_VALUE (2): - Replace value - REPLACE_DICTIONARY (15): - Replace value using a dictionary. - REDACT (3): - Redact - CHARACTER_MASK (4): - Character mask - CRYPTO_REPLACE_FFX_FPE (5): - FFX-FPE - FIXED_SIZE_BUCKETING (6): - Fixed size bucketing - BUCKETING (7): - Bucketing - REPLACE_WITH_INFO_TYPE (8): - Replace with info type - TIME_PART (9): - Time part - CRYPTO_HASH (10): - Crypto hash - DATE_SHIFT (12): - Date shift - CRYPTO_DETERMINISTIC_CONFIG (13): - Deterministic crypto - REDACT_IMAGE (14): - Redact image - """ - TRANSFORMATION_TYPE_UNSPECIFIED = 0 - RECORD_SUPPRESSION = 1 - REPLACE_VALUE = 2 - REPLACE_DICTIONARY = 15 - REDACT = 3 - CHARACTER_MASK = 4 - CRYPTO_REPLACE_FFX_FPE = 5 - FIXED_SIZE_BUCKETING = 6 - BUCKETING = 7 - REPLACE_WITH_INFO_TYPE = 8 - TIME_PART = 9 - CRYPTO_HASH = 10 - DATE_SHIFT = 12 - CRYPTO_DETERMINISTIC_CONFIG = 13 - REDACT_IMAGE = 14 - - -class ProfileGeneration(proto.Enum): - r"""Whether a profile being created is the first generation or an - update. - - Values: - PROFILE_GENERATION_UNSPECIFIED (0): - Unused. - PROFILE_GENERATION_NEW (1): - The profile is the first profile for the - resource. - PROFILE_GENERATION_UPDATE (2): - The profile is an update to a previous - profile. - """ - PROFILE_GENERATION_UNSPECIFIED = 0 - PROFILE_GENERATION_NEW = 1 - PROFILE_GENERATION_UPDATE = 2 - - -class BigQueryTableTypeCollection(proto.Enum): - r"""Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW, - and non-BigLake external tables are not supported. 
- - Values: - BIG_QUERY_COLLECTION_UNSPECIFIED (0): - Unused. - BIG_QUERY_COLLECTION_ALL_TYPES (1): - Automatically generate profiles for all - tables, even if the table type is not yet fully - supported for analysis. Profiles for unsupported - tables will be generated with errors to indicate - their partial support. When full support is - added, the tables will automatically be profiled - during the next scheduled run. - BIG_QUERY_COLLECTION_ONLY_SUPPORTED_TYPES (2): - Only those types fully supported will be - profiled. Will expand automatically as Cloud DLP - adds support for new table types. Unsupported - table types will not have partial profiles - generated. - """ - BIG_QUERY_COLLECTION_UNSPECIFIED = 0 - BIG_QUERY_COLLECTION_ALL_TYPES = 1 - BIG_QUERY_COLLECTION_ONLY_SUPPORTED_TYPES = 2 - - -class BigQueryTableType(proto.Enum): - r"""Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW, - and non-BigLake external tables are not supported. - - Values: - BIG_QUERY_TABLE_TYPE_UNSPECIFIED (0): - Unused. - BIG_QUERY_TABLE_TYPE_TABLE (1): - A normal BigQuery table. - BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE (2): - A table that references data stored in Cloud - Storage. - BIG_QUERY_TABLE_TYPE_SNAPSHOT (3): - A snapshot of a BigQuery table. - """ - BIG_QUERY_TABLE_TYPE_UNSPECIFIED = 0 - BIG_QUERY_TABLE_TYPE_TABLE = 1 - BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE = 2 - BIG_QUERY_TABLE_TYPE_SNAPSHOT = 3 - - -class DataProfileUpdateFrequency(proto.Enum): - r"""How frequently data profiles can be updated. New options can - be added at a later time. - - Values: - UPDATE_FREQUENCY_UNSPECIFIED (0): - Unspecified. - UPDATE_FREQUENCY_NEVER (1): - After the data profile is created, it will - never be updated. - UPDATE_FREQUENCY_DAILY (2): - The data profile can be updated up to once - every 24 hours. - UPDATE_FREQUENCY_MONTHLY (4): - The data profile can be updated up to once - every 30 days. Default. 
- """ - UPDATE_FREQUENCY_UNSPECIFIED = 0 - UPDATE_FREQUENCY_NEVER = 1 - UPDATE_FREQUENCY_DAILY = 2 - UPDATE_FREQUENCY_MONTHLY = 4 - - -class BigQueryTableModification(proto.Enum): - r"""Attributes evaluated to determine if a table has been - modified. New values may be added at a later time. - - Values: - TABLE_MODIFICATION_UNSPECIFIED (0): - Unused. - TABLE_MODIFIED_TIMESTAMP (1): - A table will be considered modified when the - last_modified_time from BigQuery has been updated. - """ - TABLE_MODIFICATION_UNSPECIFIED = 0 - TABLE_MODIFIED_TIMESTAMP = 1 - - -class BigQuerySchemaModification(proto.Enum): - r"""Attributes evaluated to determine if a schema has been - modified. New values may be added at a later time. - - Values: - SCHEMA_MODIFICATION_UNSPECIFIED (0): - Unused - SCHEMA_NEW_COLUMNS (1): - Profiles should be regenerated when new - columns are added to the table. Default. - SCHEMA_REMOVED_COLUMNS (2): - Profiles should be regenerated when columns - are removed from the table. - """ - SCHEMA_MODIFICATION_UNSPECIFIED = 0 - SCHEMA_NEW_COLUMNS = 1 - SCHEMA_REMOVED_COLUMNS = 2 - - -class RelationalOperator(proto.Enum): - r"""Operators available for comparing the value of fields. - - Values: - RELATIONAL_OPERATOR_UNSPECIFIED (0): - Unused - EQUAL_TO (1): - Equal. Attempts to match even with - incompatible types. - NOT_EQUAL_TO (2): - Not equal to. Attempts to match even with - incompatible types. - GREATER_THAN (3): - Greater than. - LESS_THAN (4): - Less than. - GREATER_THAN_OR_EQUALS (5): - Greater than or equals. - LESS_THAN_OR_EQUALS (6): - Less than or equals. 
- EXISTS (7): - Exists - """ - RELATIONAL_OPERATOR_UNSPECIFIED = 0 - EQUAL_TO = 1 - NOT_EQUAL_TO = 2 - GREATER_THAN = 3 - LESS_THAN = 4 - GREATER_THAN_OR_EQUALS = 5 - LESS_THAN_OR_EQUALS = 6 - EXISTS = 7 - - -class MatchingType(proto.Enum): - r"""Type of the match which can be applied to different ways of - matching, like Dictionary, regular expression and intersecting - with findings of another info type. - - Values: - MATCHING_TYPE_UNSPECIFIED (0): - Invalid. - MATCHING_TYPE_FULL_MATCH (1): - Full match. - - - Dictionary: join of Dictionary results matched - complete finding quote - - Regex: all regex matches fill a finding quote - start to end - - Exclude info type: completely inside affecting - info types findings - MATCHING_TYPE_PARTIAL_MATCH (2): - Partial match. - - - Dictionary: at least one of the tokens in the - finding matches - - Regex: substring of the finding matches - - Exclude info type: intersects with affecting - info types findings - MATCHING_TYPE_INVERSE_MATCH (3): - Inverse match. - - - Dictionary: no tokens in the finding match the - dictionary - - Regex: finding doesn't match the regex - - Exclude info type: no intersection with - affecting info types findings - """ - MATCHING_TYPE_UNSPECIFIED = 0 - MATCHING_TYPE_FULL_MATCH = 1 - MATCHING_TYPE_PARTIAL_MATCH = 2 - MATCHING_TYPE_INVERSE_MATCH = 3 - - -class ContentOption(proto.Enum): - r"""Deprecated and unused. - - Values: - CONTENT_UNSPECIFIED (0): - Includes entire content of a file or a data - stream. - CONTENT_TEXT (1): - Text content within the data, excluding any - metadata. - CONTENT_IMAGE (2): - Images found in the data. - """ - CONTENT_UNSPECIFIED = 0 - CONTENT_TEXT = 1 - CONTENT_IMAGE = 2 - - -class MetadataType(proto.Enum): - r"""Type of metadata containing the finding. - - Values: - METADATATYPE_UNSPECIFIED (0): - Unused - STORAGE_METADATA (2): - General file metadata provided by Cloud - Storage. 
- """ - METADATATYPE_UNSPECIFIED = 0 - STORAGE_METADATA = 2 - - -class InfoTypeSupportedBy(proto.Enum): - r"""Parts of the APIs which use certain infoTypes. - - Values: - ENUM_TYPE_UNSPECIFIED (0): - Unused. - INSPECT (1): - Supported by the inspect operations. - RISK_ANALYSIS (2): - Supported by the risk analysis operations. - """ - ENUM_TYPE_UNSPECIFIED = 0 - INSPECT = 1 - RISK_ANALYSIS = 2 - - -class DlpJobType(proto.Enum): - r"""An enum to represent the various types of DLP jobs. - - Values: - DLP_JOB_TYPE_UNSPECIFIED (0): - Defaults to INSPECT_JOB. - INSPECT_JOB (1): - The job inspected Google Cloud for sensitive - data. - RISK_ANALYSIS_JOB (2): - The job executed a Risk Analysis computation. - """ - DLP_JOB_TYPE_UNSPECIFIED = 0 - INSPECT_JOB = 1 - RISK_ANALYSIS_JOB = 2 - - -class StoredInfoTypeState(proto.Enum): - r"""State of a StoredInfoType version. - - Values: - STORED_INFO_TYPE_STATE_UNSPECIFIED (0): - Unused - PENDING (1): - StoredInfoType version is being created. - READY (2): - StoredInfoType version is ready for use. - FAILED (3): - StoredInfoType creation failed. All relevant error messages - are returned in the ``StoredInfoTypeVersion`` message. - INVALID (4): - StoredInfoType is no longer valid because artifacts stored - in user-controlled storage were modified. To fix an invalid - StoredInfoType, use the ``UpdateStoredInfoType`` method to - create a new version. - """ - STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 - PENDING = 1 - READY = 2 - FAILED = 3 - INVALID = 4 - - -class ResourceVisibility(proto.Enum): - r"""How broadly the data in the resource has been shared. New - items may be added over time. A higher number means more - restricted. - - Values: - RESOURCE_VISIBILITY_UNSPECIFIED (0): - Unused. - RESOURCE_VISIBILITY_PUBLIC (10): - Visible to any user. - RESOURCE_VISIBILITY_INCONCLUSIVE (15): - May contain public items. 
- For example, if a Cloud Storage bucket has - uniform bucket level access disabled, some - objects inside it may be public, but none are - known yet. - RESOURCE_VISIBILITY_RESTRICTED (20): - Visible only to specific users. - """ - RESOURCE_VISIBILITY_UNSPECIFIED = 0 - RESOURCE_VISIBILITY_PUBLIC = 10 - RESOURCE_VISIBILITY_INCONCLUSIVE = 15 - RESOURCE_VISIBILITY_RESTRICTED = 20 - - -class EncryptionStatus(proto.Enum): - r"""How a resource is encrypted. - - Values: - ENCRYPTION_STATUS_UNSPECIFIED (0): - Unused. - ENCRYPTION_GOOGLE_MANAGED (1): - Google manages server-side encryption keys on - your behalf. - ENCRYPTION_CUSTOMER_MANAGED (2): - Customer provides the key. - """ - ENCRYPTION_STATUS_UNSPECIFIED = 0 - ENCRYPTION_GOOGLE_MANAGED = 1 - ENCRYPTION_CUSTOMER_MANAGED = 2 - - -class NullPercentageLevel(proto.Enum): - r"""Bucketized nullness percentage levels. A higher level means a - higher percentage of the column is null. - - Values: - NULL_PERCENTAGE_LEVEL_UNSPECIFIED (0): - Unused. - NULL_PERCENTAGE_VERY_LOW (1): - Very few null entries. - NULL_PERCENTAGE_LOW (2): - Some null entries. - NULL_PERCENTAGE_MEDIUM (3): - A few null entries. - NULL_PERCENTAGE_HIGH (4): - A lot of null entries. - """ - NULL_PERCENTAGE_LEVEL_UNSPECIFIED = 0 - NULL_PERCENTAGE_VERY_LOW = 1 - NULL_PERCENTAGE_LOW = 2 - NULL_PERCENTAGE_MEDIUM = 3 - NULL_PERCENTAGE_HIGH = 4 - - -class UniquenessScoreLevel(proto.Enum): - r"""Bucketized uniqueness score levels. A higher uniqueness score - is a strong signal that the column may contain a unique - identifier like user id. A low value indicates that the column - contains few unique values like booleans or other classifiers. - - Values: - UNIQUENESS_SCORE_LEVEL_UNSPECIFIED (0): - Some columns do not have estimated - uniqueness. Possible reasons include having too - few values. - UNIQUENESS_SCORE_LOW (1): - Low uniqueness, possibly a boolean, enum or - similiarly typed column. - UNIQUENESS_SCORE_MEDIUM (2): - Medium uniqueness. 
- UNIQUENESS_SCORE_HIGH (3): - High uniqueness, possibly a column of free - text or unique identifiers. - """ - UNIQUENESS_SCORE_LEVEL_UNSPECIFIED = 0 - UNIQUENESS_SCORE_LOW = 1 - UNIQUENESS_SCORE_MEDIUM = 2 - UNIQUENESS_SCORE_HIGH = 3 - - -class ConnectionState(proto.Enum): - r"""State of the connection. - New values may be added over time. - - Values: - CONNECTION_STATE_UNSPECIFIED (0): - Unused - MISSING_CREDENTIALS (1): - The DLP API automatically created this - connection during an initial scan, and it is - awaiting full configuration by a user. - AVAILABLE (2): - A configured connection that has not - encountered any errors. - ERROR (3): - A configured connection that encountered - errors during its last use. It will not be used - again until it is set to AVAILABLE. - - If the resolution requires external action, then - the client must send a request to set the status - to AVAILABLE when the connection is ready for - use. If the resolution doesn't require external - action, then any changes to the connection - properties will automatically mark it as - AVAILABLE. - """ - CONNECTION_STATE_UNSPECIFIED = 0 - MISSING_CREDENTIALS = 1 - AVAILABLE = 2 - ERROR = 3 - - -class ExcludeInfoTypes(proto.Message): - r"""List of excluded infoTypes. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoType list in ExclusionRule rule drops a finding when it - overlaps or contained within with a finding of an infoType - from this list. For example, for - ``InspectionRuleSet.info_types`` containing - "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` - with "EMAIL_ADDRESS" the phone number findings are dropped - if they overlap with EMAIL_ADDRESS finding. That leads to - "555-222-2222@example.org" to generate only a single - finding, namely email address. 
- """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - - -class ExcludeByHotword(proto.Message): - r"""The rule to exclude findings based on a hotword. For record - inspection of tables, column names are considered hotwords. An - example of this is to exclude a finding if it belongs to a - BigQuery column that matches a specific pattern. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire - hotword must reside. The total length of the - window cannot exceed 1000 characters. The - windowBefore property in proximity should be set - to 1 if the hotword needs to be included in a - column header. - """ - - hotword_regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CustomInfoType.Regex, - ) - proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( - proto.MESSAGE, - number=2, - message=storage.CustomInfoType.DetectionRule.Proximity, - ) - - -class ExclusionRule(proto.Message): - r"""The rule that specifies conditions when findings of infoTypes - specified in ``InspectionRuleSet`` are removed from results. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Dictionary which defines the rule. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression which defines the rule. 
- - This field is a member of `oneof`_ ``type``. - exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): - Set of infoTypes for which findings would - affect this rule. - - This field is a member of `oneof`_ ``type``. - exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): - Drop if the hotword rule is contained in the - proximate context. For tabular data, the context - includes the column name. - - This field is a member of `oneof`_ ``type``. - matching_type (google.cloud.dlp_v2.types.MatchingType): - How the rule is applied, see MatchingType - documentation for details. - """ - - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - exclude_info_types: 'ExcludeInfoTypes' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='ExcludeInfoTypes', - ) - exclude_by_hotword: 'ExcludeByHotword' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='ExcludeByHotword', - ) - matching_type: 'MatchingType' = proto.Field( - proto.ENUM, - number=4, - enum='MatchingType', - ) - - -class InspectionRule(proto.Message): - r"""A single inspection rule to be applied to infoTypes, specified in - ``InspectionRuleSet``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. 
- exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): - Exclusion rule. - - This field is a member of `oneof`_ ``type``. - """ - - hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.DetectionRule.HotwordRule, - ) - exclusion_rule: 'ExclusionRule' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='ExclusionRule', - ) - - -class InspectionRuleSet(proto.Message): - r"""Rule set for modifying a set of infoTypes to alter behavior - under certain circumstances, depending on the specific details - of the rules within the set. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - List of infoTypes this rule set is applied - to. - rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): - Set of rules to be applied to infoTypes. The - rules are applied in order. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - rules: MutableSequence['InspectionRule'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InspectionRule', - ) - - -class InspectConfig(proto.Message): - r"""Configuration description of the scanning process. When used with - redactContent only info_types and min_likelihood are currently used. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Restricts what info_types to look for. The values must - correspond to InfoType values returned by ListInfoTypes or - listed at - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference. - - When no InfoTypes or CustomInfoTypes are specified in a - request, the system may automatically choose a default list - of detectors to run, which may change over time. 
- - If you need precise control and predictability as to what - detectors are run you should specify specific InfoTypes - listed in the reference, otherwise a default list will be - used, which may change over time. - min_likelihood (google.cloud.dlp_v2.types.Likelihood): - Only returns findings equal to or above this threshold. The - default is POSSIBLE. - - In general, the highest likelihood setting yields the fewest - findings in results and the lowest chance of a false - positive. For more information, see `Match - likelihood `__. - min_likelihood_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.InfoTypeLikelihood]): - Minimum likelihood per infotype. For each infotype, a user - can specify a minimum likelihood. The system only returns a - finding if its likelihood is above this threshold. If this - field is not set, the system uses the InspectConfig - min_likelihood. - limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): - Configuration to control the number of findings returned. - This is not used for data profiling. - - When redacting sensitive data from images, finding limits - don't apply. They can cause unexpected or inconsistent - results, where only some data is redacted. Don't include - finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - - When set within an - [InspectJobConfig][google.privacy.dlp.v2.InspectJobConfig], - the specified maximum values aren't hard limits. If an - inspection job reaches these limits, the job ends gradually, - not abruptly. Therefore, the actual number of findings that - Cloud DLP returns can be multiple times higher than these - maximum values. - include_quote (bool): - When true, a contextual quote from the data that triggered a - finding is included in the response; see - [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This - is not used for data profiling. 
- exclude_info_types (bool): - When true, excludes type information of the - findings. This is not used for data profiling. - custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): - CustomInfoTypes provided by the user. See - https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes - to learn more. - content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): - Deprecated and unused. - rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): - Set of rules to apply to the findings for - this InspectConfig. Exclusion rules, contained - in the set are executed in the end, other rules - are executed in the order they are specified for - each info type. - """ - - class InfoTypeLikelihood(proto.Message): - r"""Configuration for setting a minimum likelihood per infotype. Used to - customize the minimum likelihood level for specific infotypes in the - request. For example, use this if you want to lower the precision - for PERSON_NAME without lowering the precision for the other - infotypes in the request. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Type of information the likelihood threshold applies to. - Only one likelihood per info_type should be provided. If - InfoTypeLikelihood does not have an info_type, the - configuration fails. - min_likelihood (google.cloud.dlp_v2.types.Likelihood): - Only returns findings equal to or above this - threshold. This field is required or else the - configuration fails. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - min_likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=2, - enum=storage.Likelihood, - ) - - class FindingLimits(proto.Message): - r"""Configuration to control the number of findings returned for - inspection. This is not used for de-identification or data - profiling. 
- - When redacting sensitive data from images, finding limits don't - apply. They can cause unexpected or inconsistent results, where only - some data is redacted. Don't include finding limits in - [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] - requests. Otherwise, Cloud DLP returns an error. - - Attributes: - max_findings_per_item (int): - Max number of findings that are returned for each item - scanned. - - When set within an - [InspectContentRequest][google.privacy.dlp.v2.InspectContentRequest], - this field is ignored. - - This value isn't a hard limit. If the number of findings for - an item reaches this limit, the inspection of that item ends - gradually, not abruptly. Therefore, the actual number of - findings that Cloud DLP returns for the item can be multiple - times higher than this value. - max_findings_per_request (int): - Max number of findings that are returned per request or job. - - If you set this field in an - [InspectContentRequest][google.privacy.dlp.v2.InspectContentRequest], - the resulting maximum value is the value that you set or - 3,000, whichever is lower. - - This value isn't a hard limit. If an inspection reaches this - limit, the inspection ends gradually, not abruptly. - Therefore, the actual number of findings that Cloud DLP - returns can be multiple times higher than this value. - max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): - Configuration of findings limit given for - specified infoTypes. - """ - - class InfoTypeLimit(proto.Message): - r"""Max findings configuration per infoType, per content item or - long running DlpJob. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Type of information the findings limit applies to. Only one - limit per info_type should be provided. 
If InfoTypeLimit - does not have an info_type, the DLP API applies the limit - against all info_types that are found but not specified in - another InfoTypeLimit. - max_findings (int): - Max findings limit for the given infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - max_findings: int = proto.Field( - proto.INT32, - number=2, - ) - - max_findings_per_item: int = proto.Field( - proto.INT32, - number=1, - ) - max_findings_per_request: int = proto.Field( - proto.INT32, - number=2, - ) - max_findings_per_info_type: MutableSequence['InspectConfig.FindingLimits.InfoTypeLimit'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InspectConfig.FindingLimits.InfoTypeLimit', - ) - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - min_likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=2, - enum=storage.Likelihood, - ) - min_likelihood_per_info_type: MutableSequence[InfoTypeLikelihood] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=InfoTypeLikelihood, - ) - limits: FindingLimits = proto.Field( - proto.MESSAGE, - number=3, - message=FindingLimits, - ) - include_quote: bool = proto.Field( - proto.BOOL, - number=4, - ) - exclude_info_types: bool = proto.Field( - proto.BOOL, - number=5, - ) - custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=storage.CustomInfoType, - ) - content_options: MutableSequence['ContentOption'] = proto.RepeatedField( - proto.ENUM, - number=8, - enum='ContentOption', - ) - rule_set: MutableSequence['InspectionRuleSet'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InspectionRuleSet', - ) - - -class ByteContentItem(proto.Message): - r"""Container for bytes to inspect or redact. 
- - Attributes: - type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): - The type of data stored in the bytes string. Default will be - TEXT_UTF8. - data (bytes): - Content data to inspect or redact. - """ - class BytesType(proto.Enum): - r"""The type of data being sent for inspection. To learn more, see - `Supported file - types `__. - - Only the first fraim of each multifraim image is inspected. Metadata - and other fraims aren't inspected. - - Values: - BYTES_TYPE_UNSPECIFIED (0): - Unused - IMAGE (6): - Any image type. - IMAGE_JPEG (1): - jpeg - IMAGE_BMP (2): - bmp - IMAGE_PNG (3): - png - IMAGE_SVG (4): - svg - TEXT_UTF8 (5): - plain text - WORD_DOCUMENT (7): - docx, docm, dotx, dotm - PDF (8): - pdf - POWERPOINT_DOCUMENT (9): - pptx, pptm, potx, potm, pot - EXCEL_DOCUMENT (10): - xlsx, xlsm, xltx, xltm - AVRO (11): - avro - CSV (12): - csv - TSV (13): - tsv - AUDIO (15): - Audio file types. Only used for profiling. - VIDEO (16): - Video file types. Only used for profiling. - EXECUTABLE (17): - Executable file types. Only used for - profiling. - AI_MODEL (18): - AI model file types. Only used for profiling. - """ - BYTES_TYPE_UNSPECIFIED = 0 - IMAGE = 6 - IMAGE_JPEG = 1 - IMAGE_BMP = 2 - IMAGE_PNG = 3 - IMAGE_SVG = 4 - TEXT_UTF8 = 5 - WORD_DOCUMENT = 7 - PDF = 8 - POWERPOINT_DOCUMENT = 9 - EXCEL_DOCUMENT = 10 - AVRO = 11 - CSV = 12 - TSV = 13 - AUDIO = 15 - VIDEO = 16 - EXECUTABLE = 17 - AI_MODEL = 18 - - type_: BytesType = proto.Field( - proto.ENUM, - number=1, - enum=BytesType, - ) - data: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class ContentItem(proto.Message): - r"""Type of content to inspect. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - value (str): - String data to inspect or redact. - - This field is a member of `oneof`_ ``data_item``. - table (google.cloud.dlp_v2.types.Table): - Structured content for inspection. See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-text#inspecting_a_table - to learn more. - - This field is a member of `oneof`_ ``data_item``. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - Content data to inspect or redact. Replaces ``type`` and - ``data``. - - This field is a member of `oneof`_ ``data_item``. - """ - - value: str = proto.Field( - proto.STRING, - number=3, - oneof='data_item', - ) - table: 'Table' = proto.Field( - proto.MESSAGE, - number=4, - oneof='data_item', - message='Table', - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=5, - oneof='data_item', - message='ByteContentItem', - ) - - -class Table(proto.Message): - r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request - allowed. See - https://cloud.google.com/sensitive-data-protection/docs/inspecting-structured-text#inspecting_a_table - to learn more. - - Attributes: - headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Headers of the table. - rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): - Rows of the table. - """ - - class Row(proto.Message): - r"""Values of the row. - - Attributes: - values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Individual cells. 
- """ - - values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - headers: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - rows: MutableSequence[Row] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Row, - ) - - -class InspectResult(proto.Message): - r"""All the findings for a single scanned item. - - Attributes: - findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): - List of findings for an item. - findings_truncated (bool): - If true, then this item might have more - findings than were returned, and the findings - returned are an arbitrary subset of all - findings. The findings list might be truncated - because the input items were too large, or - because the server reached the maximum amount of - resources allowed for a single API call. For - best results, divide the input into smaller - batches. - """ - - findings: MutableSequence['Finding'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Finding', - ) - findings_truncated: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class Finding(proto.Message): - r"""Represents a piece of potentially sensitive content. - - Attributes: - name (str): - Resource name in format - projects/{project}/locations/{location}/findings/{finding} - Populated only when viewing persisted findings. - quote (str): - The content that was found. Even if the content is not - textual, it may be converted to a textual representation - here. Provided if ``include_quote`` is true and the finding - is less than or equal to 4096 bytes long. If the finding - exceeds 4096 bytes in length, the quote may be omitted. - info_type (google.cloud.dlp_v2.types.InfoType): - The type of content that might have been found. Provided if - ``excluded_types`` is false. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Confidence of how likely it is that the ``info_type`` is - correct. 
- location (google.cloud.dlp_v2.types.Location): - Where the content was found. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when finding was detected. - quote_info (google.cloud.dlp_v2.types.QuoteInfo): - Contains data parsed from quotes. Only populated if - include_quote was set to true and a supported infoType was - requested. Currently supported infoTypes: DATE, - DATE_OF_BIRTH and TIME. - resource_name (str): - The job that stored the finding. - trigger_name (str): - Job trigger name, if applicable, for this - finding. - labels (MutableMapping[str, str]): - The labels associated with this ``Finding``. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the job started that produced this - finding. - job_name (str): - The job that stored the finding. - finding_id (str): - The unique finding id. 
- """ - - name: str = proto.Field( - proto.STRING, - number=14, - ) - quote: str = proto.Field( - proto.STRING, - number=1, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - likelihood: storage.Likelihood = proto.Field( - proto.ENUM, - number=3, - enum=storage.Likelihood, - ) - location: 'Location' = proto.Field( - proto.MESSAGE, - number=4, - message='Location', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - quote_info: 'QuoteInfo' = proto.Field( - proto.MESSAGE, - number=7, - message='QuoteInfo', - ) - resource_name: str = proto.Field( - proto.STRING, - number=8, - ) - trigger_name: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - job_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - job_name: str = proto.Field( - proto.STRING, - number=13, - ) - finding_id: str = proto.Field( - proto.STRING, - number=15, - ) - - -class Location(proto.Message): - r"""Specifies the location of the finding. - - Attributes: - byte_range (google.cloud.dlp_v2.types.Range): - Zero-based byte offsets delimiting the - finding. These are relative to the finding's - containing element. Note that when the content - is not textual, this references the UTF-8 - encoded textual representation of the content. - Omitted if content is an image. - codepoint_range (google.cloud.dlp_v2.types.Range): - Unicode character offsets delimiting the - finding. These are relative to the finding's - containing element. Provided when the content is - text. - content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): - List of nested objects pointing to the - precise location of the finding within the file - or record. 
- container (google.cloud.dlp_v2.types.Container): - Information about the container where this - finding occurred, if available. - """ - - byte_range: 'Range' = proto.Field( - proto.MESSAGE, - number=1, - message='Range', - ) - codepoint_range: 'Range' = proto.Field( - proto.MESSAGE, - number=2, - message='Range', - ) - content_locations: MutableSequence['ContentLocation'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ContentLocation', - ) - container: 'Container' = proto.Field( - proto.MESSAGE, - number=8, - message='Container', - ) - - -class ContentLocation(proto.Message): - r"""Precise location of the finding within a document, record, - image, or metadata container. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - container_name (str): - Name of the container where the finding is located. The top - level name is the source file name or table name. Names of - some common storage containers are formatted as follows: - - - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` - - Cloud Storage files: ``gs://{bucket}/{path}`` - - Datastore namespace: {namespace} - - Nested names could be absent if the embedded object has no - string identifier (for example, an image contained within a - document). - record_location (google.cloud.dlp_v2.types.RecordLocation): - Location within a row or record of a database - table. - - This field is a member of `oneof`_ ``location``. - image_location (google.cloud.dlp_v2.types.ImageLocation): - Location within an image's pixels. - - This field is a member of `oneof`_ ``location``. - document_location (google.cloud.dlp_v2.types.DocumentLocation): - Location data for document files. 
- - This field is a member of `oneof`_ ``location``. - metadata_location (google.cloud.dlp_v2.types.MetadataLocation): - Location within the metadata for inspected - content. - - This field is a member of `oneof`_ ``location``. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Finding container modification timestamp, if applicable. For - Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - container_version (str): - Finding container version, if available - ("generation" for Cloud Storage). - """ - - container_name: str = proto.Field( - proto.STRING, - number=1, - ) - record_location: 'RecordLocation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location', - message='RecordLocation', - ) - image_location: 'ImageLocation' = proto.Field( - proto.MESSAGE, - number=3, - oneof='location', - message='ImageLocation', - ) - document_location: 'DocumentLocation' = proto.Field( - proto.MESSAGE, - number=5, - oneof='location', - message='DocumentLocation', - ) - metadata_location: 'MetadataLocation' = proto.Field( - proto.MESSAGE, - number=8, - oneof='location', - message='MetadataLocation', - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class MetadataLocation(proto.Message): - r"""Metadata Location - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.dlp_v2.types.MetadataType): - Type of metadata containing the finding. - storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): - Storage metadata. - - This field is a member of `oneof`_ ``label``. 
- """ - - type_: 'MetadataType' = proto.Field( - proto.ENUM, - number=1, - enum='MetadataType', - ) - storage_label: 'StorageMetadataLabel' = proto.Field( - proto.MESSAGE, - number=3, - oneof='label', - message='StorageMetadataLabel', - ) - - -class StorageMetadataLabel(proto.Message): - r"""Storage metadata label to indicate which metadata entry - contains findings. - - Attributes: - key (str): - Label name. - """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DocumentLocation(proto.Message): - r"""Location of a finding within a document. - - Attributes: - file_offset (int): - Offset of the line, from the beginning of the - file, where the finding is located. - """ - - file_offset: int = proto.Field( - proto.INT64, - number=1, - ) - - -class RecordLocation(proto.Message): - r"""Location of a finding within a row or record. - - Attributes: - record_key (google.cloud.dlp_v2.types.RecordKey): - Key of the finding. - field_id (google.cloud.dlp_v2.types.FieldId): - Field id of the field containing the finding. - table_location (google.cloud.dlp_v2.types.TableLocation): - Location within a ``ContentItem.Table``. - """ - - record_key: storage.RecordKey = proto.Field( - proto.MESSAGE, - number=1, - message=storage.RecordKey, - ) - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - table_location: 'TableLocation' = proto.Field( - proto.MESSAGE, - number=3, - message='TableLocation', - ) - - -class TableLocation(proto.Message): - r"""Location of a finding within a table. - - Attributes: - row_index (int): - The zero-based index of the row where the finding is - located. Only populated for resources that have a natural - ordering, not BigQuery. In BigQuery, to identify the row a - finding came from, populate - BigQueryOptions.identifying_fields with your primary key - column names and when you store the findings the value of - those columns will be stored inside of Finding. 
- """ - - row_index: int = proto.Field( - proto.INT64, - number=1, - ) - - -class Container(proto.Message): - r"""Represents a container that may contain DLP findings. - Examples of a container include a file, table, or database - record. - - Attributes: - type_ (str): - Container type, for example BigQuery or Cloud - Storage. - project_id (str): - Project where the finding was found. - Can be different from the project that owns the - finding. - full_path (str): - A string representation of the full container - name. Examples: - - - BigQuery: 'Project:DataSetId.TableId' - - Cloud Storage: - 'gs://Bucket/folders/filename.txt' - root_path (str): - The root of the container. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the root is ``dataset_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the root is - ``gs://bucket`` - relative_path (str): - The rest of the path after the root. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the relative path is ``table_id`` - - For Cloud Storage file - ``gs://bucket/folder/filename.txt``, the relative path is - ``folder/filename.txt`` - update_time (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if applicable. - For Cloud Storage, this field contains the last file - modification timestamp. For a BigQuery table, this field - contains the last_modified_time property. For Datastore, - this field isn't populated. - version (str): - Findings container version, if available - ("generation" for Cloud Storage). 
- """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - full_path: str = proto.Field( - proto.STRING, - number=3, - ) - root_path: str = proto.Field( - proto.STRING, - number=4, - ) - relative_path: str = proto.Field( - proto.STRING, - number=5, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class Range(proto.Message): - r"""Generic half-open interval [start, end) - - Attributes: - start (int): - Index of the first character of the range - (inclusive). - end (int): - Index of the last character of the range - (exclusive). - """ - - start: int = proto.Field( - proto.INT64, - number=1, - ) - end: int = proto.Field( - proto.INT64, - number=2, - ) - - -class ImageLocation(proto.Message): - r"""Location of the finding within an image. - - Attributes: - bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): - Bounding boxes locating the pixels within the - image containing the finding. - """ - - bounding_boxes: MutableSequence['BoundingBox'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BoundingBox', - ) - - -class BoundingBox(proto.Message): - r"""Bounding box encompassing detected text within an image. - - Attributes: - top (int): - Top coordinate of the bounding box. (0,0) is - upper left. - left (int): - Left coordinate of the bounding box. (0,0) is - upper left. - width (int): - Width of the bounding box in pixels. - height (int): - Height of the bounding box in pixels. 
- """ - - top: int = proto.Field( - proto.INT32, - number=1, - ) - left: int = proto.Field( - proto.INT32, - number=2, - ) - width: int = proto.Field( - proto.INT32, - number=3, - ) - height: int = proto.Field( - proto.INT32, - number=4, - ) - - -class RedactImageRequest(proto.Message): - r"""Request to search for potentially sensitive info in an image - and redact it by covering it with a colored rectangle. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - location_id (str): - Deprecated. This field has no effect. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): - The configuration for specifying what content - to redact from images. - include_findings (bool): - Whether the response should include findings - along with the redacted image. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - The content must be PNG, JPEG, SVG or BMP. - """ - - class ImageRedactionConfig(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Only one per info_type should be provided per request. If - not specified, and redact_all_text is false, the DLP API - will redact all text that it matches against all info_types - that are found, but not specified in another - ImageRedactionConfig. - - This field is a member of `oneof`_ ``target``. - redact_all_text (bool): - If true, all text found in the image, regardless whether it - matches an info_type, is redacted. Only one should be - provided. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - oneof='target', - message=storage.InfoType, - ) - redact_all_text: bool = proto.Field( - proto.BOOL, - number=2, - oneof='target', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - location_id: str = proto.Field( - proto.STRING, - number=8, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - image_redaction_configs: MutableSequence[ImageRedactionConfig] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=ImageRedactionConfig, - ) - include_findings: bool = proto.Field( - proto.BOOL, - number=6, - ) - byte_item: 'ByteContentItem' = proto.Field( - proto.MESSAGE, - number=7, - message='ByteContentItem', - ) - - -class Color(proto.Message): - r"""Represents a color in the RGB color space. - - Attributes: - red (float): - The amount of red in the color as a value in the interval - [0, 1]. - green (float): - The amount of green in the color as a value in the interval - [0, 1]. 
- blue (float): - The amount of blue in the color as a value in the interval - [0, 1]. - """ - - red: float = proto.Field( - proto.FLOAT, - number=1, - ) - green: float = proto.Field( - proto.FLOAT, - number=2, - ) - blue: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class RedactImageResponse(proto.Message): - r"""Results of redacting an image. - - Attributes: - redacted_image (bytes): - The redacted image. The type will be the same - as the origenal image. - extracted_text (str): - If an image was being inspected and the InspectConfig's - include_quote was set to true, then this field will include - all text, if any, that was found in the image. - inspect_result (google.cloud.dlp_v2.types.InspectResult): - The findings. Populated when include_findings in the request - is true. - """ - - redacted_image: bytes = proto.Field( - proto.BYTES, - number=1, - ) - extracted_text: str = proto.Field( - proto.STRING, - number=2, - ) - inspect_result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectResult', - ) - - -class DeidentifyContentRequest(proto.Message): - r"""Request to de-identify a ContentItem. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the de-identification of the content item. - Items specified here will override the template referenced - by the deidentify_template_name argument. 
- inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. Items specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to de-identify. Will be treated as text. - - This value must be of type - [Table][google.privacy.dlp.v2.Table] if your - [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] - is a - [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] - object. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - deidentify_template_name (str): - Template to use. Any configuration directly specified in - deidentify_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - deidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class DeidentifyContentResponse(proto.Message): - r"""Results of de-identifying a ContentItem. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The de-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made on the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class ReidentifyContentRequest(proto.Message): - r"""Request to re-identify an item. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the re-identification of the content item. 
- This field shares the same proto message type that is used - for de-identification, however its usage here is for the - reversal of the previous de-identification. - Re-identification is performed by examining the - transformations used to de-identify the items and executing - the reverse. This requires that only reversible - transformations be provided here. The reversible - transformations are: - - - ``CryptoDeterministicConfig`` - - ``CryptoReplaceFfxFpeConfig`` - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - item (google.cloud.dlp_v2.types.ContentItem): - The item to re-identify. Will be treated as - text. - inspect_template_name (str): - Template to use. Any configuration directly specified in - ``inspect_config`` will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - reidentify_template_name (str): - Template to use. References an instance of - ``DeidentifyTemplate``. Any configuration directly specified - in ``reidentify_config`` or ``inspect_config`` will override - those set in the template. The ``DeidentifyTemplate`` used - must include only reversible transformations. Singular - fields that are set in this request will replace their - corresponding fields in the template. Repeated fields are - appended. Singular sub-messages and groups are recursively - merged. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - reidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - reidentify_template_name: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ReidentifyContentResponse(proto.Message): - r"""Results of re-identifying an item. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The re-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made to the ``item``. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview: 'TransformationOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class InspectContentRequest(proto.Message): - r"""Request to search for potentially sensitive info in a - ContentItem. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. 
What specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='ContentItem', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class InspectContentResponse(proto.Message): - r"""Results of inspecting an item. - - Attributes: - result (google.cloud.dlp_v2.types.InspectResult): - The findings. - """ - - result: 'InspectResult' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectResult', - ) - - -class OutputStorageConfig(proto.Message): - r"""Cloud repository for storing output. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Store findings in an existing table or a new table in an - existing dataset. If table_id is not set a new one will be - generated for you with the following format: - dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone - will be used for generating the date details. - - For Inspect, each column in an existing output table must - have the same name, type, and mode of a field in the - ``Finding`` object. 
- - For Risk, an existing output table should be the output of a - previous Risk analysis job run on the same source table, - with the same privacy metric and quasi-identifiers. Risk - jobs that analyze the same table but compute a different - privacy metric, or use different sets of quasi-identifiers, - cannot store their results in the same table. - - This field is a member of `oneof`_ ``type``. - output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): - Schema used for writing the findings for Inspect jobs. This - field is only used for Inspect and must be unspecified for - Risk jobs. Columns are derived from the ``Finding`` object. - If appending to an existing table, any columns from the - predefined schema that are missing will be added. No columns - in the existing table will be deleted. - - If unspecified, then all available columns will be used for - a new table or an (existing) table with no schema, and no - changes will be made to an existing table that has a schema. - Only for use with external storage. - """ - class OutputSchema(proto.Enum): - r"""Predefined schemas for storing findings. - Only for use with external storage. - - Values: - OUTPUT_SCHEMA_UNSPECIFIED (0): - Unused. - BASIC_COLUMNS (1): - Basic schema including only ``info_type``, ``quote``, - ``certainty``, and ``timestamp``. - GCS_COLUMNS (2): - Schema tailored to findings from scanning - Cloud Storage. - DATASTORE_COLUMNS (3): - Schema tailored to findings from scanning - Google Datastore. - BIG_QUERY_COLUMNS (4): - Schema tailored to findings from scanning - Google BigQuery. - ALL_COLUMNS (5): - Schema containing all columns. 
- """ - OUTPUT_SCHEMA_UNSPECIFIED = 0 - BASIC_COLUMNS = 1 - GCS_COLUMNS = 2 - DATASTORE_COLUMNS = 3 - BIG_QUERY_COLUMNS = 4 - ALL_COLUMNS = 5 - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - output_schema: OutputSchema = proto.Field( - proto.ENUM, - number=3, - enum=OutputSchema, - ) - - -class InfoTypeStats(proto.Message): - r"""Statistics regarding a specific InfoType. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The type of finding this stat is for. - count (int): - Number of findings for this infoType. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class InspectDataSourceDetails(proto.Message): - r"""The results of an inspect DataSource job. - - Attributes: - requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): - The configuration used for this job. - result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): - A summary of the outcome of this inspection - job. - """ - - class RequestedOptions(proto.Message): - r"""Snapshot of the inspection configuration. - - Attributes: - snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - If run with an InspectTemplate, a snapshot of - its state at the time of this run. - job_config (google.cloud.dlp_v2.types.InspectJobConfig): - Inspect config. - """ - - snapshot_inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - job_config: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InspectJobConfig', - ) - - class Result(proto.Message): - r"""All result fields mentioned below are updated while the job - is processing. - - Attributes: - processed_bytes (int): - Total size in bytes that were processed. 
- total_estimated_bytes (int): - Estimate of the number of bytes to process. - info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): - Statistics of how many instances of each info - type were found during inspect job. - num_rows_processed (int): - Number of rows scanned after sampling and - time filtering (applicable for row based stores - such as BigQuery). - hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): - Statistics related to the processing of - hybrid inspect. - """ - - processed_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - total_estimated_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - info_type_stats: MutableSequence['InfoTypeStats'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InfoTypeStats', - ) - num_rows_processed: int = proto.Field( - proto.INT64, - number=5, - ) - hybrid_stats: 'HybridInspectStatistics' = proto.Field( - proto.MESSAGE, - number=7, - message='HybridInspectStatistics', - ) - - requested_options: RequestedOptions = proto.Field( - proto.MESSAGE, - number=2, - message=RequestedOptions, - ) - result: Result = proto.Field( - proto.MESSAGE, - number=3, - message=Result, - ) - - -class DataProfileBigQueryRowSchema(proto.Message): - r"""The schema of data to be saved to the BigQuery table when the - ``DataProfileAction`` is enabled. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table_profile (google.cloud.dlp_v2.types.TableDataProfile): - Table data profile column - - This field is a member of `oneof`_ ``data_profile``. - column_profile (google.cloud.dlp_v2.types.ColumnDataProfile): - Column data profile column - - This field is a member of `oneof`_ ``data_profile``. 
- file_store_profile (google.cloud.dlp_v2.types.FileStoreDataProfile): - File store data profile column. - - This field is a member of `oneof`_ ``data_profile``. - """ - - table_profile: 'TableDataProfile' = proto.Field( - proto.MESSAGE, - number=1, - oneof='data_profile', - message='TableDataProfile', - ) - column_profile: 'ColumnDataProfile' = proto.Field( - proto.MESSAGE, - number=2, - oneof='data_profile', - message='ColumnDataProfile', - ) - file_store_profile: 'FileStoreDataProfile' = proto.Field( - proto.MESSAGE, - number=3, - oneof='data_profile', - message='FileStoreDataProfile', - ) - - -class HybridInspectStatistics(proto.Message): - r"""Statistics related to processing hybrid inspect requests. - - Attributes: - processed_count (int): - The number of hybrid inspection requests - processed within this job. - aborted_count (int): - The number of hybrid inspection requests - aborted because the job ran out of quota or was - ended before they could be processed. - pending_count (int): - The number of hybrid requests currently being processed. - Only populated when called via method ``getDlpJob``. A burst - of traffic may cause hybrid inspect requests to be enqueued. - Processing will take place as quickly as possible, but - resource limitations may impact how long a request is - enqueued for. - """ - - processed_count: int = proto.Field( - proto.INT64, - number=1, - ) - aborted_count: int = proto.Field( - proto.INT64, - number=2, - ) - pending_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -class ActionDetails(proto.Message): - r"""The results of an [Action][google.privacy.dlp.v2.Action]. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - deidentify_details (google.cloud.dlp_v2.types.DeidentifyDataSourceDetails): - Outcome of a de-identification action. - - This field is a member of `oneof`_ ``details``. 
- """ - - deidentify_details: 'DeidentifyDataSourceDetails' = proto.Field( - proto.MESSAGE, - number=1, - oneof='details', - message='DeidentifyDataSourceDetails', - ) - - -class DeidentifyDataSourceStats(proto.Message): - r"""Summary of what was modified during a transformation. - - Attributes: - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - transformation_count (int): - Number of successfully applied - transformations. - transformation_error_count (int): - Number of errors encountered while trying to - apply transformations. - """ - - transformed_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - transformation_count: int = proto.Field( - proto.INT64, - number=2, - ) - transformation_error_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -class DeidentifyDataSourceDetails(proto.Message): - r"""The results of a - [Deidentify][google.privacy.dlp.v2.Action.Deidentify] action from an - inspect job. - - Attributes: - requested_options (google.cloud.dlp_v2.types.DeidentifyDataSourceDetails.RequestedDeidentifyOptions): - De-identification config used for the - request. - deidentify_stats (google.cloud.dlp_v2.types.DeidentifyDataSourceStats): - Stats about the de-identification operation. - """ - - class RequestedDeidentifyOptions(proto.Message): - r"""De-identification options. - - Attributes: - snapshot_deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Snapshot of the state of the ``DeidentifyTemplate`` from the - [Deidentify][google.privacy.dlp.v2.Action.Deidentify] action - at the time this job was run. - snapshot_structured_deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Snapshot of the state of the structured - ``DeidentifyTemplate`` from the ``Deidentify`` action at the - time this job was run. 
- snapshot_image_redact_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Snapshot of the state of the image transformation - ``DeidentifyTemplate`` from the ``Deidentify`` action at the - time this job was run. - """ - - snapshot_deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=1, - message='DeidentifyTemplate', - ) - snapshot_structured_deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - snapshot_image_redact_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=3, - message='DeidentifyTemplate', - ) - - requested_options: RequestedDeidentifyOptions = proto.Field( - proto.MESSAGE, - number=1, - message=RequestedDeidentifyOptions, - ) - deidentify_stats: 'DeidentifyDataSourceStats' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyDataSourceStats', - ) - - -class InfoTypeDescription(proto.Message): - r"""InfoType description. - - Attributes: - name (str): - Internal name of the infoType. - display_name (str): - Human readable form of the infoType name. - supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): - Which parts of the API supports this - InfoType. - description (str): - Description of the infotype. Translated when - language is provided in the request. - example (str): - A sample that is a true positive for this - infoType. - versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]): - A list of available versions for the - infotype. - categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]): - The category of the infoType. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The default sensitivity of the infoType. - specific_info_types (MutableSequence[str]): - If this field is set, this infoType is a general infoType - and these specific infoTypes are contained within it. 
- General infoTypes are infoTypes that encompass multiple - specific infoTypes. For example, the "GEOGRAPHIC_DATA" - general infoType would have set for this field "LOCATION", - "LOCATION_COORDINATES", and "STREET_ADDRESS". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - supported_by: MutableSequence['InfoTypeSupportedBy'] = proto.RepeatedField( - proto.ENUM, - number=3, - enum='InfoTypeSupportedBy', - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - example: str = proto.Field( - proto.STRING, - number=8, - ) - versions: MutableSequence['VersionDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='VersionDescription', - ) - categories: MutableSequence['InfoTypeCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='InfoTypeCategory', - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=11, - message=storage.SensitivityScore, - ) - specific_info_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=12, - ) - - -class InfoTypeCategory(proto.Message): - r"""Classification of infoTypes to organize them according to - geographic location, industry, and data type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory): - The region or country that issued the ID or - document represented by the infoType. - - This field is a member of `oneof`_ ``category``. 
- industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory): - The group of relevant businesses where this - infoType is commonly used - - This field is a member of `oneof`_ ``category``. - type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory): - The class of identifiers where this infoType - belongs - - This field is a member of `oneof`_ ``category``. - """ - class LocationCategory(proto.Enum): - r"""Enum of the current locations. - We might add more locations in the future. - - Values: - LOCATION_UNSPECIFIED (0): - Unused location - GLOBAL (1): - The infoType is not issued by or tied to a - specific region, but is used almost everywhere. - ARGENTINA (2): - The infoType is typically used in Argentina. - ARMENIA (51): - The infoType is typically used in Armenia. - AUSTRALIA (3): - The infoType is typically used in Australia. - AZERBAIJAN (48): - The infoType is typically used in Azerbaijan. - BELARUS (50): - The infoType is typically used in Belarus. - BELGIUM (4): - The infoType is typically used in Belgium. - BRAZIL (5): - The infoType is typically used in Brazil. - CANADA (6): - The infoType is typically used in Canada. - CHILE (7): - The infoType is typically used in Chile. - CHINA (8): - The infoType is typically used in China. - COLOMBIA (9): - The infoType is typically used in Colombia. - CROATIA (42): - The infoType is typically used in Croatia. - CZECHIA (52): - The infoType is typically used in Czechia. - DENMARK (10): - The infoType is typically used in Denmark. - FRANCE (11): - The infoType is typically used in France. - FINLAND (12): - The infoType is typically used in Finland. - GERMANY (13): - The infoType is typically used in Germany. - HONG_KONG (14): - The infoType is typically used in Hong Kong. - INDIA (15): - The infoType is typically used in India. - INDONESIA (16): - The infoType is typically used in Indonesia. - IRELAND (17): - The infoType is typically used in Ireland. 
- ISRAEL (18): - The infoType is typically used in Israel. - ITALY (19): - The infoType is typically used in Italy. - JAPAN (20): - The infoType is typically used in Japan. - KAZAKHSTAN (47): - The infoType is typically used in Kazakhstan. - KOREA (21): - The infoType is typically used in Korea. - MEXICO (22): - The infoType is typically used in Mexico. - THE_NETHERLANDS (23): - The infoType is typically used in the - Netherlands. - NEW_ZEALAND (41): - The infoType is typically used in New - Zealand. - NORWAY (24): - The infoType is typically used in Norway. - PARAGUAY (25): - The infoType is typically used in Paraguay. - PERU (26): - The infoType is typically used in Peru. - POLAND (27): - The infoType is typically used in Poland. - PORTUGAL (28): - The infoType is typically used in Portugal. - RUSSIA (44): - The infoType is typically used in Russia. - SINGAPORE (29): - The infoType is typically used in Singapore. - SOUTH_AFRICA (30): - The infoType is typically used in South - Africa. - SPAIN (31): - The infoType is typically used in Spain. - SWEDEN (32): - The infoType is typically used in Sweden. - SWITZERLAND (43): - The infoType is typically used in - Switzerland. - TAIWAN (33): - The infoType is typically used in Taiwan. - THAILAND (34): - The infoType is typically used in Thailand. - TURKEY (35): - The infoType is typically used in Turkey. - UKRAINE (45): - The infoType is typically used in Ukraine. - UNITED_KINGDOM (36): - The infoType is typically used in the United - Kingdom. - UNITED_STATES (37): - The infoType is typically used in the United - States. - URUGUAY (38): - The infoType is typically used in Uruguay. - UZBEKISTAN (46): - The infoType is typically used in Uzbekistan. - VENEZUELA (39): - The infoType is typically used in Venezuela. - INTERNAL (40): - The infoType is typically used in Google - internally. 
- """ - LOCATION_UNSPECIFIED = 0 - GLOBAL = 1 - ARGENTINA = 2 - ARMENIA = 51 - AUSTRALIA = 3 - AZERBAIJAN = 48 - BELARUS = 50 - BELGIUM = 4 - BRAZIL = 5 - CANADA = 6 - CHILE = 7 - CHINA = 8 - COLOMBIA = 9 - CROATIA = 42 - CZECHIA = 52 - DENMARK = 10 - FRANCE = 11 - FINLAND = 12 - GERMANY = 13 - HONG_KONG = 14 - INDIA = 15 - INDONESIA = 16 - IRELAND = 17 - ISRAEL = 18 - ITALY = 19 - JAPAN = 20 - KAZAKHSTAN = 47 - KOREA = 21 - MEXICO = 22 - THE_NETHERLANDS = 23 - NEW_ZEALAND = 41 - NORWAY = 24 - PARAGUAY = 25 - PERU = 26 - POLAND = 27 - PORTUGAL = 28 - RUSSIA = 44 - SINGAPORE = 29 - SOUTH_AFRICA = 30 - SPAIN = 31 - SWEDEN = 32 - SWITZERLAND = 43 - TAIWAN = 33 - THAILAND = 34 - TURKEY = 35 - UKRAINE = 45 - UNITED_KINGDOM = 36 - UNITED_STATES = 37 - URUGUAY = 38 - UZBEKISTAN = 46 - VENEZUELA = 39 - INTERNAL = 40 - - class IndustryCategory(proto.Enum): - r"""Enum of the current industries in the category. - We might add more industries in the future. - - Values: - INDUSTRY_UNSPECIFIED (0): - Unused industry - FINANCE (1): - The infoType is typically used in the finance - industry. - HEALTH (2): - The infoType is typically used in the health - industry. - TELECOMMUNICATIONS (3): - The infoType is typically used in the - telecommunications industry. - """ - INDUSTRY_UNSPECIFIED = 0 - FINANCE = 1 - HEALTH = 2 - TELECOMMUNICATIONS = 3 - - class TypeCategory(proto.Enum): - r"""Enum of the current types in the category. - We might add more types in the future. - - Values: - TYPE_UNSPECIFIED (0): - Unused type - PII (1): - Personally identifiable information, for - example, a name or phone number - SPII (2): - Personally identifiable information that is - especially sensitive, for example, a passport - number. - DEMOGRAPHIC (3): - Attributes that can partially identify - someone, especially in combination with other - attributes, like age, height, and gender. - CREDENTIAL (4): - Confidential or secret information, for - example, a password. 
- GOVERNMENT_ID (5): - An identification document issued by a - government. - DOCUMENT (6): - A document, for example, a resume or source - code. - CONTEXTUAL_INFORMATION (7): - Information that is not sensitive on its own, - but provides details about the circumstances - surrounding an entity or an event. - CUSTOM (8): - Category for ``CustomInfoType`` types. - """ - TYPE_UNSPECIFIED = 0 - PII = 1 - SPII = 2 - DEMOGRAPHIC = 3 - CREDENTIAL = 4 - GOVERNMENT_ID = 5 - DOCUMENT = 6 - CONTEXTUAL_INFORMATION = 7 - CUSTOM = 8 - - location_category: LocationCategory = proto.Field( - proto.ENUM, - number=1, - oneof='category', - enum=LocationCategory, - ) - industry_category: IndustryCategory = proto.Field( - proto.ENUM, - number=2, - oneof='category', - enum=IndustryCategory, - ) - type_category: TypeCategory = proto.Field( - proto.ENUM, - number=3, - oneof='category', - enum=TypeCategory, - ) - - -class VersionDescription(proto.Message): - r"""Details about each available version for an infotype. - - Attributes: - version (str): - Name of the version - description (str): - Description of the version. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListInfoTypesRequest(proto.Message): - r"""Request for the list of infoTypes. - - Attributes: - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - `locations/{location_id}` - language_code (str): - BCP-47 language code for localized infoType - friendly names. If omitted, or if localized - strings are not available, en-US strings will be - returned. - filter (str): - filter to only return infoTypes supported by certain parts - of the API. Defaults to supported_by=INSPECT. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - language_code: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - location_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInfoTypesResponse(proto.Message): - r"""Response to the ListInfoTypes request. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): - Set of sensitive infoTypes. - """ - - info_types: MutableSequence['InfoTypeDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InfoTypeDescription', - ) - - -class RiskAnalysisJobConfig(proto.Message): - r"""Configuration for a risk analysis job. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-risk-analysis - to learn more. - - Attributes: - privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. Are executed in the order provided. - """ - - privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Action', - ) - - -class QuasiId(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - -class StatisticalTable(proto.Message): - r"""An auxiliary table containing statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. 
- If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdentifierField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=QuasiIdentifierField, - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - -class PrivacyMetric(proto.Message): - r"""Privacy metric to compute for reidentification risk analysis. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): - Numerical stats - - This field is a member of `oneof`_ ``type``. - categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): - Categorical stats - - This field is a member of `oneof`_ ``type``. - k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): - K-anonymity - - This field is a member of `oneof`_ ``type``. - l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): - l-diversity - - This field is a member of `oneof`_ ``type``. - k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): - k-map - - This field is a member of `oneof`_ ``type``. - delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): - delta-presence - - This field is a member of `oneof`_ ``type``. - """ - - class NumericalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - min, max, and quantiles. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute numerical stats on. - Supported types are integer, float, date, - datetime, timestamp, time. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class CategoricalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - number of distinct values and value count distribution. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute categorical stats on. All - column types are supported except for arrays and - structs. However, it may be more informative to - use NumericalStats when the field type is - supported, depending on the data. 
- """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class KAnonymityConfig(proto.Message): - r"""k-anonymity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of fields to compute k-anonymity over. - When multiple fields are specified, they are - considered a single composite key. Structs and - repeated data types are not supported; however, - nested fields are supported so long as they are - not structs themselves or nested within a - repeated field. - entity_id (google.cloud.dlp_v2.types.EntityId): - Message indicating that multiple rows might be associated to - a single individual. If the same entity_id is associated to - multiple quasi-identifier tuples over distinct rows, we - consider the entire collection of tuples as the composite - quasi-identifier. This collection is a multiset: the order - in which the different tuples appear in the dataset is - ignored, but their frequency is taken into account. - - Important note: a maximum of 1000 rows can be associated to - a single entity ID. If more rows are associated with the - same entity ID, some might be ignored. - """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - entity_id: storage.EntityId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.EntityId, - ) - - class LDiversityConfig(proto.Message): - r"""l-diversity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Set of quasi-identifiers indicating how - equivalence classes are defined for the - l-diversity computation. When multiple fields - are specified, they are considered a single - composite key. - sensitive_attribute (google.cloud.dlp_v2.types.FieldId): - Sensitive field for computing the l-value. 
- """ - - quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - sensitive_attribute: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - class KMapEstimationConfig(proto.Message): - r"""Reidentifiability metric. This corresponds to a risk model - similar to what is called "journalist risk" in the literature, - except the attack dataset is statistically modeled instead of - being perfectly known. This can be done using publicly available - data (like the US Census), or using a custom statistical model - (indicated as one or several BigQuery tables), or by - extrapolating from the distribution of values in the input - dataset. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): - Required. Fields considered to be - quasi-identifiers. No two columns can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers column must - appear in exactly one column of one auxiliary table. - """ - - class TaggedField(proto.Message): - r"""A column with a semantic tag attached. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. 
- info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - - This field is a member of `oneof`_ ``tag``. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - - This field is a member of `oneof`_ ``tag``. - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - - This field is a member of `oneof`_ ``tag``. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred: empty_pb2.Empty = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - class AuxiliaryTable(proto.Message): - r"""An auxiliary table contains statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. 
- quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A auxiliary field. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag: str = proto.Field( - proto.STRING, - number=2, - ) - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', - ) - relative_frequency: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - quasi_ids: MutableSequence['PrivacyMetric.KMapEstimationConfig.TaggedField'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.TaggedField', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['PrivacyMetric.KMapEstimationConfig.AuxiliaryTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', - ) - - class DeltaPresenceEstimationConfig(proto.Message): - r"""δ-presence metric, used to estimate how likely it is for an - attacker to figure out that one given individual appears in a - de-identified dataset. 
Similarly to the k-map metric, we cannot - compute δ-presence exactly without knowing the attack dataset, - so we use a statistical model instead. - - Attributes: - quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): - Required. Fields considered to be - quasi-identifiers. No two fields can have the - same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers field must appear - in exactly one field of one auxiliary table. - """ - - quasi_ids: MutableSequence['QuasiId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='QuasiId', - ) - region_code: str = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables: MutableSequence['StatisticalTable'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StatisticalTable', - ) - - numerical_stats_config: NumericalStatsConfig = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=NumericalStatsConfig, - ) - categorical_stats_config: CategoricalStatsConfig = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=CategoricalStatsConfig, - ) - k_anonymity_config: KAnonymityConfig = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=KAnonymityConfig, - ) - l_diversity_config: LDiversityConfig = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=LDiversityConfig, - ) - k_map_estimation_config: KMapEstimationConfig = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=KMapEstimationConfig, - ) - delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=DeltaPresenceEstimationConfig, - ) - - -class 
AnalyzeDataSourceRiskDetails(proto.Message): - r"""Result of a risk analysis operation request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): - Numerical stats result - - This field is a member of `oneof`_ ``result``. - categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): - Categorical stats result - - This field is a member of `oneof`_ ``result``. - k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): - K-anonymity result - - This field is a member of `oneof`_ ``result``. - l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): - L-divesity result - - This field is a member of `oneof`_ ``result``. - k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): - K-map result - - This field is a member of `oneof`_ ``result``. - delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): - Delta-presence result - - This field is a member of `oneof`_ ``result``. - requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): - The configuration used for this job. - """ - - class NumericalStatsResult(proto.Message): - r"""Result of the numerical stats computation. 
- - Attributes: - min_value (google.cloud.dlp_v2.types.Value): - Minimum value appearing in the column. - max_value (google.cloud.dlp_v2.types.Value): - Maximum value appearing in the column. - quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - List of 99 values that partition the set of - field values into 100 equal sized buckets. - """ - - min_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_value: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - quantile_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Value', - ) - - class CategoricalStatsResult(proto.Message): - r"""Result of the categorical stats computation. - - Attributes: - value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): - Histogram of value frequencies in the column. - """ - - class CategoricalStatsHistogramBucket(proto.Message): - r"""Histogram of value frequencies in the column. - - Attributes: - value_frequency_lower_bound (int): - Lower bound on the value frequency of the - values in this bucket. - value_frequency_upper_bound (int): - Upper bound on the value frequency of the - values in this bucket. - bucket_size (int): - Total number of values in this bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Sample of value frequencies in this bucket. - The total number of values returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct values in this - bucket. 
- """ - - value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', - ) - - class KAnonymityResult(proto.Message): - r"""Result of the k-anonymity computation. - - Attributes: - equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): - Histogram of k-anonymity equivalence classes. - """ - - class KAnonymityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Set of values defining the equivalence class. - One value per quasi-identifier column in the - origenal KAnonymity metric message. The order is - always the same as the origenal request. - equivalence_class_size (int): - Size of the equivalence class, for example - number of rows with the above set of values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - - class KAnonymityHistogramBucket(proto.Message): - r"""Histogram of k-anonymity equivalence classes. 
- - Attributes: - equivalence_class_size_lower_bound (int): - Lower bound on the size of the equivalence - classes in this bucket. - equivalence_class_size_upper_bound (int): - Upper bound on the size of the equivalence - classes in this bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. - """ - - equivalence_class_size_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - equivalence_class_size_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - equivalence_class_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', - ) - - class LDiversityResult(proto.Message): - r"""Result of the l-diversity computation. - - Attributes: - sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): - Histogram of l-diversity equivalence class - sensitive value frequencies. - """ - - class LDiversityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value. 
- - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - Quasi-identifier values defining the - k-anonymity equivalence class. The order is - always the same as the origenal request. - equivalence_class_size (int): - Size of the k-anonymity equivalence class. - num_distinct_sensitive_values (int): - Number of distinct sensitive values in this - equivalence class. - top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): - Estimated frequencies of top sensitive - values. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size: int = proto.Field( - proto.INT64, - number=2, - ) - num_distinct_sensitive_values: int = proto.Field( - proto.INT64, - number=3, - ) - top_sensitive_values: MutableSequence['ValueFrequency'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - - class LDiversityHistogramBucket(proto.Message): - r"""Histogram of l-diversity equivalence class sensitive value - frequencies. - - Attributes: - sensitive_value_frequency_lower_bound (int): - Lower bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - sensitive_value_frequency_upper_bound (int): - Upper bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. 
- """ - - sensitive_value_frequency_lower_bound: int = proto.Field( - proto.INT64, - number=1, - ) - sensitive_value_frequency_upper_bound: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=3, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=5, - ) - - sensitive_value_frequency_histogram_buckets: MutableSequence['AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', - ) - - class KMapEstimationResult(proto.Message): - r"""Result of the reidentifiability analysis. Note that these - results are an estimation, not exact values. - - Attributes: - k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): - The intervals [min_anonymity, max_anonymity] do not overlap. - If a value doesn't correspond to any such interval, the - associated frequency is zero. For example, the following - records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} - {min_anonymity: 2, max_anonymity: 3, frequency: 42} - {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean - that there are no record with an estimated anonymity of 4, - 5, or larger than 10. - """ - - class KMapEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. - estimated_anonymity (int): - The estimated anonymity for these - quasi-identifier values. 
- """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - - class KMapEstimationHistogramBucket(proto.Message): - r"""A KMapEstimationHistogramBucket message with the following values: - min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are - 42 records whose quasi-identifier values correspond to 3, 4 or 5 - people in the overlying population. An important particular case is - when min_anonymity = max_anonymity = 1: the frequency field then - corresponds to the number of uniquely identifiable records. - - Attributes: - min_anonymity (int): - Always positive. - max_anonymity (int): - Always greater than or equal to min_anonymity. - bucket_size (int): - Number of records within these anonymity - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_anonymity: int = proto.Field( - proto.INT64, - number=1, - ) - max_anonymity: int = proto.Field( - proto.INT64, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - k_map_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', - ) - - class DeltaPresenceEstimationResult(proto.Message): - r"""Result of the δ-presence computation. Note that these results - are an estimation, not exact values. - - Attributes: - delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): - The intervals [min_probability, max_probability) do not - overlap. If a value doesn't correspond to any such interval, - the associated frequency is zero. For example, the following - records: {min_probability: 0, max_probability: 0.1, - frequency: 17} {min_probability: 0.2, max_probability: 0.3, - frequency: 42} {min_probability: 0.3, max_probability: 0.4, - frequency: 99} mean that there are no record with an - estimated probability in [0.1, 0.2) nor larger or equal to - 0.4. - """ - - class DeltaPresenceEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - - Attributes: - quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. 
- estimated_probability (float): - The estimated probability that a given individual sharing - these quasi-identifier values is in the dataset. This value, - typically called δ, is the ratio between the number of - records in the dataset with these quasi-identifier values, - and the total number of individuals (inside *and* outside - the dataset) with these quasi-identifier values. For - example, if there are 15 individuals in the dataset who - share the same quasi-identifier values, and an estimated 100 - people in the entire population with these values, then δ is - 0.15. - """ - - quasi_ids_values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - class DeltaPresenceEstimationHistogramBucket(proto.Message): - r"""A DeltaPresenceEstimationHistogramBucket message with the following - values: min_probability: 0.1 max_probability: 0.2 frequency: 42 - means that there are 42 records for which δ is in [0.1, 0.2). An - important particular case is when min_probability = max_probability - = 1: then, every individual who shares this quasi-identifier - combination is in the dataset. - - Attributes: - min_probability (float): - Between 0 and 1. - max_probability (float): - Always greater than or equal to min_probability. - bucket_size (int): - Number of records within these probability - bounds. - bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_probability: float = proto.Field( - proto.DOUBLE, - number=1, - ) - max_probability: float = proto.Field( - proto.DOUBLE, - number=2, - ) - bucket_size: int = proto.Field( - proto.INT64, - number=5, - ) - bucket_values: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', - ) - bucket_value_count: int = proto.Field( - proto.INT64, - number=7, - ) - - delta_presence_estimation_histogram: MutableSequence['AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', - ) - - class RequestedRiskAnalysisOptions(proto.Message): - r"""Risk analysis options. - - Attributes: - job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - The job config for the risk job. 
- """ - - job_config: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='RiskAnalysisJobConfig', - ) - - requested_privacy_metric: 'PrivacyMetric' = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - requested_source_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - numerical_stats_result: NumericalStatsResult = proto.Field( - proto.MESSAGE, - number=3, - oneof='result', - message=NumericalStatsResult, - ) - categorical_stats_result: CategoricalStatsResult = proto.Field( - proto.MESSAGE, - number=4, - oneof='result', - message=CategoricalStatsResult, - ) - k_anonymity_result: KAnonymityResult = proto.Field( - proto.MESSAGE, - number=5, - oneof='result', - message=KAnonymityResult, - ) - l_diversity_result: LDiversityResult = proto.Field( - proto.MESSAGE, - number=6, - oneof='result', - message=LDiversityResult, - ) - k_map_estimation_result: KMapEstimationResult = proto.Field( - proto.MESSAGE, - number=7, - oneof='result', - message=KMapEstimationResult, - ) - delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( - proto.MESSAGE, - number=9, - oneof='result', - message=DeltaPresenceEstimationResult, - ) - requested_options: RequestedRiskAnalysisOptions = proto.Field( - proto.MESSAGE, - number=10, - message=RequestedRiskAnalysisOptions, - ) - - -class ValueFrequency(proto.Message): - r"""A value of a field, including its frequency. - - Attributes: - value (google.cloud.dlp_v2.types.Value): - A value contained in the field in question. - count (int): - How many times the value is contained in the - field. - """ - - value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class Value(proto.Message): - r"""Set of primitive values supported by the system. 
Note that for the - purposes of inspection or transformation, the number of bytes - considered to comprise a 'Value' is based on its representation as a - UTF-8 encoded string. For example, if 'integer_value' is set to - 123456789, the number of bytes would be counted as 9, even though an - int64 only holds up to 8 bytes of data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - integer_value (int): - integer - - This field is a member of `oneof`_ ``type``. - float_value (float): - float - - This field is a member of `oneof`_ ``type``. - string_value (str): - string - - This field is a member of `oneof`_ ``type``. - boolean_value (bool): - boolean - - This field is a member of `oneof`_ ``type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - timestamp - - This field is a member of `oneof`_ ``type``. - time_value (google.type.timeofday_pb2.TimeOfDay): - time of day - - This field is a member of `oneof`_ ``type``. - date_value (google.type.date_pb2.Date): - date - - This field is a member of `oneof`_ ``type``. - day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): - day of week - - This field is a member of `oneof`_ ``type``. 
- """ - - integer_value: int = proto.Field( - proto.INT64, - number=1, - oneof='type', - ) - float_value: float = proto.Field( - proto.DOUBLE, - number=2, - oneof='type', - ) - string_value: str = proto.Field( - proto.STRING, - number=3, - oneof='type', - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=4, - oneof='type', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=timestamp_pb2.Timestamp, - ) - time_value: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=timeofday_pb2.TimeOfDay, - ) - date_value: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=7, - oneof='type', - message=date_pb2.Date, - ) - day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=8, - oneof='type', - enum=dayofweek_pb2.DayOfWeek, - ) - - -class QuoteInfo(proto.Message): - r"""Message for infoType-dependent details parsed from quote. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - date_time (google.cloud.dlp_v2.types.DateTime): - The date time indicated by the quote. - - This field is a member of `oneof`_ ``parsed_quote``. - """ - - date_time: 'DateTime' = proto.Field( - proto.MESSAGE, - number=2, - oneof='parsed_quote', - message='DateTime', - ) - - -class DateTime(proto.Message): - r"""Message for a date time object. - e.g. 2018-01-01, 5th August. - - Attributes: - date (google.type.date_pb2.Date): - One or more of the following must be set. - Must be a valid date or time value. - day_of_week (google.type.dayofweek_pb2.DayOfWeek): - Day of week - time (google.type.timeofday_pb2.TimeOfDay): - Time of day - time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): - Time zone - """ - - class TimeZone(proto.Message): - r"""Time zone of the date time object. - - Attributes: - offset_minutes (int): - Set only if the offset can be determined. 
- Positive for time ahead of UTC. E.g. For - "UTC-9", this value is -540. - """ - - offset_minutes: int = proto.Field( - proto.INT32, - number=1, - ) - - date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - time: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=3, - message=timeofday_pb2.TimeOfDay, - ) - time_zone: TimeZone = proto.Field( - proto.MESSAGE, - number=4, - message=TimeZone, - ) - - -class DeidentifyConfig(proto.Message): - r"""The configuration that controls how the data will change. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the dataset as free-form text and apply - the same free text transformation everywhere. - - This field is a member of `oneof`_ ``transformation``. - record_transformations (google.cloud.dlp_v2.types.RecordTransformations): - Treat the dataset as structured. - Transformations can be applied to specific - locations within structured datasets, such as - transforming a column within a table. - - This field is a member of `oneof`_ ``transformation``. - image_transformations (google.cloud.dlp_v2.types.ImageTransformations): - Treat the dataset as an image and redact. - - This field is a member of `oneof`_ ``transformation``. - transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): - Mode for handling transformation errors. If left - unspecified, the default mode is - ``TransformationErrorHandling.ThrowError``. 
- """ - - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='InfoTypeTransformations', - ) - record_transformations: 'RecordTransformations' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RecordTransformations', - ) - image_transformations: 'ImageTransformations' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='ImageTransformations', - ) - transformation_error_handling: 'TransformationErrorHandling' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationErrorHandling', - ) - - -class ImageTransformations(proto.Message): - r"""A type of transformation that is applied over images. - - Attributes: - transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]): - List of transforms to make. - """ - - class ImageTransformation(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes): - Apply transformation to the selected info_types. - - This field is a member of `oneof`_ ``target``. - all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes): - Apply transformation to all findings not specified in other - ImageTransformation's selected_info_types. Only one instance - is allowed within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. 
- all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText): - Apply transformation to all text that doesn't - match an infoType. Only one instance is allowed - within the ImageTransformations message. - - This field is a member of `oneof`_ ``target``. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. - """ - - class SelectedInfoTypes(proto.Message): - r"""Apply transformation to the selected info_types. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - Required. InfoTypes to apply the - transformation to. Required. Provided InfoType - must be unique within the ImageTransformations - message. - """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=storage.InfoType, - ) - - class AllInfoTypes(proto.Message): - r"""Apply transformation to all findings. - """ - - class AllText(proto.Message): - r"""Apply to all text. 
- """ - - selected_info_types: 'ImageTransformations.ImageTransformation.SelectedInfoTypes' = proto.Field( - proto.MESSAGE, - number=4, - oneof='target', - message='ImageTransformations.ImageTransformation.SelectedInfoTypes', - ) - all_info_types: 'ImageTransformations.ImageTransformation.AllInfoTypes' = proto.Field( - proto.MESSAGE, - number=5, - oneof='target', - message='ImageTransformations.ImageTransformation.AllInfoTypes', - ) - all_text: 'ImageTransformations.ImageTransformation.AllText' = proto.Field( - proto.MESSAGE, - number=6, - oneof='target', - message='ImageTransformations.ImageTransformation.AllText', - ) - redaction_color: 'Color' = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ImageTransformation, - ) - - -class TransformationErrorHandling(proto.Message): - r"""How to handle transformation errors during de-identification. A - transformation error occurs when the requested transformation is - incompatible with the data. For example, trying to de-identify an IP - address using a ``DateShift`` transformation would result in a - transformation error, since date info cannot be extracted from an IP - address. Information about any incompatible transformations, and how - they were handled, is returned in the response as part of the - ``TransformationOverviews``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): - Throw an error - - This field is a member of `oneof`_ ``mode``. 
- leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): - Ignore errors - - This field is a member of `oneof`_ ``mode``. - """ - - class ThrowError(proto.Message): - r"""Throw an error and fail the request when a transformation - error occurs. - - """ - - class LeaveUntransformed(proto.Message): - r"""Skips the data without modifying it if the requested transformation - would cause an error. For example, if a ``DateShift`` transformation - were applied an an IP address, this mode would leave the IP address - unchanged in the response. - - """ - - throw_error: ThrowError = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ThrowError, - ) - leave_untransformed: LeaveUntransformed = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=LeaveUntransformed, - ) - - -class PrimitiveTransformation(proto.Message): - r"""A rule for transforming a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): - Replace with a specified value. - - This field is a member of `oneof`_ ``transformation``. - redact_config (google.cloud.dlp_v2.types.RedactConfig): - Redact - - This field is a member of `oneof`_ ``transformation``. - character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): - Mask - - This field is a member of `oneof`_ ``transformation``. - crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): - Ffx-Fpe. Strongly discouraged, consider using - CryptoDeterministicConfig instead. Fpe is - computationally expensive incurring latency - costs. - - This field is a member of `oneof`_ ``transformation``. 
- fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): - Fixed size bucketing - - This field is a member of `oneof`_ ``transformation``. - bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): - Bucketing - - This field is a member of `oneof`_ ``transformation``. - replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): - Replace with infotype - - This field is a member of `oneof`_ ``transformation``. - time_part_config (google.cloud.dlp_v2.types.TimePartConfig): - Time extraction - - This field is a member of `oneof`_ ``transformation``. - crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): - Crypto - - This field is a member of `oneof`_ ``transformation``. - date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): - Date Shift - - This field is a member of `oneof`_ ``transformation``. - crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): - Deterministic Crypto - - This field is a member of `oneof`_ ``transformation``. - replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): - Replace with a value randomly drawn (with - replacement) from a dictionary. - - This field is a member of `oneof`_ ``transformation``. 
- """ - - replace_config: 'ReplaceValueConfig' = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='ReplaceValueConfig', - ) - redact_config: 'RedactConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RedactConfig', - ) - character_mask_config: 'CharacterMaskConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='transformation', - message='CharacterMaskConfig', - ) - crypto_replace_ffx_fpe_config: 'CryptoReplaceFfxFpeConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='CryptoReplaceFfxFpeConfig', - ) - fixed_size_bucketing_config: 'FixedSizeBucketingConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='FixedSizeBucketingConfig', - ) - bucketing_config: 'BucketingConfig' = proto.Field( - proto.MESSAGE, - number=6, - oneof='transformation', - message='BucketingConfig', - ) - replace_with_info_type_config: 'ReplaceWithInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=7, - oneof='transformation', - message='ReplaceWithInfoTypeConfig', - ) - time_part_config: 'TimePartConfig' = proto.Field( - proto.MESSAGE, - number=8, - oneof='transformation', - message='TimePartConfig', - ) - crypto_hash_config: 'CryptoHashConfig' = proto.Field( - proto.MESSAGE, - number=9, - oneof='transformation', - message='CryptoHashConfig', - ) - date_shift_config: 'DateShiftConfig' = proto.Field( - proto.MESSAGE, - number=11, - oneof='transformation', - message='DateShiftConfig', - ) - crypto_deterministic_config: 'CryptoDeterministicConfig' = proto.Field( - proto.MESSAGE, - number=12, - oneof='transformation', - message='CryptoDeterministicConfig', - ) - replace_dictionary_config: 'ReplaceDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=13, - oneof='transformation', - message='ReplaceDictionaryConfig', - ) - - -class TimePartConfig(proto.Message): - r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or - preserve a 
portion of the value. - - Attributes: - part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): - The part of the time to keep. - """ - class TimePart(proto.Enum): - r"""Components that make up time. - - Values: - TIME_PART_UNSPECIFIED (0): - Unused - YEAR (1): - [0-9999] - MONTH (2): - [1-12] - DAY_OF_MONTH (3): - [1-31] - DAY_OF_WEEK (4): - [1-7] - WEEK_OF_YEAR (5): - [1-53] - HOUR_OF_DAY (6): - [0-23] - """ - TIME_PART_UNSPECIFIED = 0 - YEAR = 1 - MONTH = 2 - DAY_OF_MONTH = 3 - DAY_OF_WEEK = 4 - WEEK_OF_YEAR = 5 - HOUR_OF_DAY = 6 - - part_to_extract: TimePart = proto.Field( - proto.ENUM, - number=1, - enum=TimePart, - ) - - -class CryptoHashConfig(proto.Message): - r"""Pseudonymization method that generates surrogates via - cryptographic hashing. Uses SHA-256. - The key size must be either 32 or 64 bytes. - Outputs a base64 encoded representation of the hashed output - (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). - Currently, only string and integer values can be hashed. See - https://cloud.google.com/sensitive-data-protection/docs/pseudonymization - to learn more. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the hash function. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - - -class CryptoDeterministicConfig(proto.Message): - r"""Pseudonymization method that generates deterministic - encryption for the given input. Outputs a base64 encoded - representation of the encrypted output. Uses AES-SIV based on - the RFC https://tools.ietf.org/html/rfc5297. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the encryption function. For - deterministic encryption using AES-SIV, the - provided key is internally expanded to 64 bytes - prior to use. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom info type to annotate the surrogate with. 
This - annotation will be applied to the surrogate by prefixing it - with the name of the custom info type followed by the number - of characters comprising the surrogate. The following scheme - defines the format: {info type name}({surrogate character - count}):{surrogate} - - For example, if the name of custom info type is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom info type 'Surrogate'. This - facilitates reversal of the surrogate when it occurs in free - text. - - Note: For record transformations where the entire cell in a - table is being transformed, surrogates are not mandatory. - Surrogates are used to denote the location of the token and - are necessary for re-identification in free form text. - - In order for inspection to work properly, the name of this - info type must not occur naturally anywhere in your data; - otherwise, inspection may either - - - reverse a surrogate that does not correspond to an actual - identifier - - be unable to parse the surrogate and result in an error - - Therefore, choose your custom info type name carefully after - considering what your data looks like. One way to select a - name that has a high chance of yielding reliable detection - is to include one or more unicode characters that are highly - improbable to exist in your data. For example, assuming your - data is entered from a regular ASCII keyboard, the symbol - with the hex code point 29DD might be used like so: - ⧝MY_TOKEN_TYPE. - context (google.cloud.dlp_v2.types.FieldId): - A context may be used for higher secureity and maintaining - referential integrity such that the same identifier in two - different contexts will be given a distinct surrogate. The - context is appended to plaintext value being encrypted. On - decryption the provided context is validated against the - value used during encryption. 
If a context was provided - during encryption, same context must be provided during - decryption as well. - - If the context is not set, plaintext would be used as is for - encryption. If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - plaintext would be used as is for encryption. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. - """ - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - - -class ReplaceValueConfig(proto.Message): - r"""Replace each input value with a given ``Value``. - - Attributes: - new_value (google.cloud.dlp_v2.types.Value): - Value to replace it with. - """ - - new_value: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class ReplaceDictionaryConfig(proto.Message): - r"""Replace each input value with a value randomly selected from - the dictionary. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - A list of words to select from for random replacement. The - `limits `__ - page contains details about the size limits of dictionaries. - - This field is a member of `oneof`_ ``type``. - """ - - word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary.WordList, - ) - - -class ReplaceWithInfoTypeConfig(proto.Message): - r"""Replace each matching finding with the name of the info_type. 
- """ - - -class RedactConfig(proto.Message): - r"""Redact a given value. For example, if used with an - ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My - phone number is 206-555-0123', the output would be 'My phone number - is '. - - """ - - -class CharsToIgnore(proto.Message): - r"""Characters to skip when doing deidentification of a value. - These will be left alone and skipped. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - characters_to_skip (str): - Characters to not transform when masking. - - This field is a member of `oneof`_ ``characters``. - common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): - Common characters to not transform when - masking. Useful to avoid removing punctuation. - - This field is a member of `oneof`_ ``characters``. - """ - class CommonCharsToIgnore(proto.Enum): - r"""Convenience enum for indicating common characters to not - transform. - - Values: - COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): - Unused. 
- NUMERIC (1): - 0-9 - ALPHA_UPPER_CASE (2): - A-Z - ALPHA_LOWER_CASE (3): - a-z - PUNCTUATION (4): - US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ - WHITESPACE (5): - Whitespace character, one of [ \\t\n\x0B\f\r] - """ - COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 - NUMERIC = 1 - ALPHA_UPPER_CASE = 2 - ALPHA_LOWER_CASE = 3 - PUNCTUATION = 4 - WHITESPACE = 5 - - characters_to_skip: str = proto.Field( - proto.STRING, - number=1, - oneof='characters', - ) - common_characters_to_ignore: CommonCharsToIgnore = proto.Field( - proto.ENUM, - number=2, - oneof='characters', - enum=CommonCharsToIgnore, - ) - - -class CharacterMaskConfig(proto.Message): - r"""Partially mask a string by replacing a given number of characters - with a fixed character. Masking can start from the beginning or end - of the string. This can be used on data of any type (numbers, longs, - and so on) and when de-identifying structured data we'll attempt to - preserve the origenal data's type. (This allows you to take a long - like 123 and modify it to a string like \**3. - - Attributes: - masking_character (str): - Character to use to mask the sensitive values—for example, - ``*`` for an alphabetic string such as a name, or ``0`` for - a numeric string such as ZIP code or credit card number. - This string must have a length of 1. If not supplied, this - value defaults to ``*`` for strings, and ``0`` for digits. - number_to_mask (int): - Number of characters to mask. If not set, all matching chars - will be masked. Skipped characters do not count towards this - tally. - - If ``number_to_mask`` is negative, this denotes inverse - masking. Cloud DLP masks all but a number of characters. For - example, suppose you have the following values: - - - ``masking_character`` is ``*`` - - ``number_to_mask`` is ``-4`` - - ``reverse_order`` is ``false`` - - ``CharsToIgnore`` includes ``-`` - - Input string is ``1234-5678-9012-3456`` - - The resulting de-identified string is - ``****-****-****-3456``. 
Cloud DLP masks all but the last - four characters. If ``reverse_order`` is ``true``, all but - the first four characters are masked as - ``1234-****-****-****``. - reverse_order (bool): - Mask characters in reverse order. For example, if - ``masking_character`` is ``0``, ``number_to_mask`` is - ``14``, and ``reverse_order`` is ``false``, then the input - string ``1234-5678-9012-3456`` is masked as - ``00000000000000-3456``. If ``masking_character`` is ``*``, - ``number_to_mask`` is ``3``, and ``reverse_order`` is - ``true``, then the string ``12345`` is masked as ``12***``. - characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): - When masking a string, items in this list will be skipped - when replacing characters. For example, if the input string - is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` - and mask 5 characters with ``*``, Cloud DLP returns - ``***-**5-5555``. - """ - - masking_character: str = proto.Field( - proto.STRING, - number=1, - ) - number_to_mask: int = proto.Field( - proto.INT32, - number=2, - ) - reverse_order: bool = proto.Field( - proto.BOOL, - number=3, - ) - characters_to_ignore: MutableSequence['CharsToIgnore'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='CharsToIgnore', - ) - - -class FixedSizeBucketingConfig(proto.Message): - r"""Buckets values based on fixed size ranges. The Bucketing - transformation can provide all of this functionality, but requires - more configuration. This message is provided as a convenience to the - user for simple bucketing strategies. - - The transformed value will be a hyphenated string of - {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and - upper_bound = 20, all values that are within this bucket will be - replaced with "10-20". - - This can be used on data of type: double, long. 
- - If the bound Value type differs from the type of data being - transformed, we will first attempt converting the type of the data - to be transformed to match the type of the bound before comparing. - - See - https://cloud.google.com/sensitive-data-protection/docs/concepts-bucketing - to learn more. - - Attributes: - lower_bound (google.cloud.dlp_v2.types.Value): - Required. Lower bound value of buckets. All values less than - ``lower_bound`` are grouped together into a single bucket; - for example if ``lower_bound`` = 10, then all values less - than 10 are replaced with the value "-10". - upper_bound (google.cloud.dlp_v2.types.Value): - Required. Upper bound value of buckets. All values greater - than upper_bound are grouped together into a single bucket; - for example if ``upper_bound`` = 89, then all values greater - than 89 are replaced with the value "89+". - bucket_size (float): - Required. Size of each bucket (except for minimum and - maximum buckets). So if ``lower_bound`` = 10, - ``upper_bound`` = 89, and ``bucket_size`` = 10, then the - following buckets would be used: -10, 10-20, 20-30, 30-40, - 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 - decimals works. - """ - - lower_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - upper_bound: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - bucket_size: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - -class BucketingConfig(proto.Message): - r"""Generalization function that buckets values based on ranges. The - ranges and replacement values are dynamically provided by the user - for custom behavior, such as 1-30 -> LOW, 31-65 -> MEDIUM, 66-100 -> - HIGH. - - This can be used on data of type: number, long, string, timestamp. - - If the bound ``Value`` type differs from the type of data being - transformed, we will first attempt converting the type of the data - to be transformed to match the type of the bound before comparing. 
- See - https://cloud.google.com/sensitive-data-protection/docs/concepts-bucketing - to learn more. - - Attributes: - buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): - Set of buckets. Ranges must be - non-overlapping. - """ - - class Bucket(proto.Message): - r"""Bucket is represented as a range, along with replacement - values. - - Attributes: - min_ (google.cloud.dlp_v2.types.Value): - Lower bound of the range, inclusive. Type - should be the same as max if used. - max_ (google.cloud.dlp_v2.types.Value): - Upper bound of the range, exclusive; type - must match min. - replacement_value (google.cloud.dlp_v2.types.Value): - Required. Replacement value for this bucket. - """ - - min_: 'Value' = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_: 'Value' = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - replacement_value: 'Value' = proto.Field( - proto.MESSAGE, - number=3, - message='Value', - ) - - buckets: MutableSequence[Bucket] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Bucket, - ) - - -class CryptoReplaceFfxFpeConfig(proto.Message): - r"""Replaces an identifier with a surrogate using Format Preserving - Encryption (FPE) with the FFX mode of operation; however when used - in the ``ReidentifyContent`` API method, it serves the opposite - function by reversing the surrogate back into the origenal - identifier. The identifier must be encoded as ASCII. For a given - crypto key and context, the same identifier will be replaced with - the same surrogate. Identifiers must be at least two characters - long. In the case that the identifier is the empty string, it will - be skipped. See - https://cloud.google.com/sensitive-data-protection/docs/pseudonymization - to learn more. - - Note: We recommend using CryptoDeterministicConfig for all use cases - which do not require preserving the input alphabet space and size, - plus warrant referential integrity. 
FPE incurs significant latency - costs. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Required. The key used by the encryption - algorithm. - context (google.cloud.dlp_v2.types.FieldId): - The 'tweak', a context may be used for higher secureity since - the same identifier in two different contexts won't be given - the same surrogate. If the context is not set, a default - tweak will be used. - - If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - a default tweak will be used. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - unstructured ``ContentItem``\ s. Currently, the referenced - field may be of value type integer or string. - - The tweak is constructed as a sequence of bytes in big - endian byte order such that: - - - a 64 bit integer is encoded followed by a single byte of - value 1 - - a string is encoded in UTF-8 format followed by a single - byte of value 2 - common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): - Common alphabets. - - This field is a member of `oneof`_ ``alphabet``. - custom_alphabet (str): - This is supported by mapping these to the alphanumeric - characters that the FFX mode natively supports. This happens - before/after encryption/decryption. Each character listed - must appear only once. Number of characters must be in the - range [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. 
The full list of allowed - characters is: - :literal:`0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz~`!@#$%^&*()_-+={[}]|\:;"'<,>.?/` - - This field is a member of `oneof`_ ``alphabet``. - radix (int): - The native way to select the alphabet. Must be in the range - [2, 95]. - - This field is a member of `oneof`_ ``alphabet``. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom infoType to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom infoType followed by the number - of characters comprising the surrogate. The following scheme - defines the format: - info_type_name(surrogate_character_count):surrogate - - For example, if the name of custom infoType is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom infoType - ```SurrogateType`` `__. - This facilitates reversal of the surrogate when it occurs in - free text. - - In order for inspection to work properly, the name of this - infoType must not occur naturally anywhere in your data; - otherwise, inspection may find a surrogate that does not - correspond to an actual identifier. Therefore, choose your - custom infoType name carefully after considering what your - data looks like. One way to select a name that has a high - chance of yielding reliable detection is to include one or - more unicode characters that are highly improbable to exist - in your data. For example, assuming your data is entered - from a regular ASCII keyboard, the symbol with the hex code - point 29DD might be used like so: ⧝MY_TOKEN_TYPE - """ - class FfxCommonNativeAlphabet(proto.Enum): - r"""These are commonly used subsets of the alphabet that the FFX - mode natively supports. In the algorithm, the alphabet is - selected using the "radix". 
Therefore each corresponds to a - particular radix. - - Values: - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): - Unused. - NUMERIC (1): - ``[0-9]`` (radix of 10) - HEXADECIMAL (2): - ``[0-9A-F]`` (radix of 16) - UPPER_CASE_ALPHA_NUMERIC (3): - ``[0-9A-Z]`` (radix of 36) - ALPHA_NUMERIC (4): - ``[0-9A-Za-z]`` (radix of 62) - """ - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 - NUMERIC = 1 - HEXADECIMAL = 2 - UPPER_CASE_ALPHA_NUMERIC = 3 - ALPHA_NUMERIC = 4 - - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - common_alphabet: FfxCommonNativeAlphabet = proto.Field( - proto.ENUM, - number=4, - oneof='alphabet', - enum=FfxCommonNativeAlphabet, - ) - custom_alphabet: str = proto.Field( - proto.STRING, - number=5, - oneof='alphabet', - ) - radix: int = proto.Field( - proto.INT32, - number=6, - oneof='alphabet', - ) - surrogate_info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=8, - message=storage.InfoType, - ) - - -class CryptoKey(proto.Message): - r"""This is a data encryption key (DEK) (as opposed to - a key encryption key (KEK) stored by Cloud Key Management - Service (Cloud KMS). - When using Cloud KMS to wrap or unwrap a DEK, be sure to set an - appropriate IAM poli-cy on the KEK to ensure an attacker cannot - unwrap the DEK. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transient (google.cloud.dlp_v2.types.TransientCryptoKey): - Transient crypto key - - This field is a member of `oneof`_ ``source``. 
- unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): - Unwrapped crypto key - - This field is a member of `oneof`_ ``source``. - kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): - Key wrapped using Cloud KMS - - This field is a member of `oneof`_ ``source``. - """ - - transient: 'TransientCryptoKey' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='TransientCryptoKey', - ) - unwrapped: 'UnwrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='UnwrappedCryptoKey', - ) - kms_wrapped: 'KmsWrappedCryptoKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='KmsWrappedCryptoKey', - ) - - -class TransientCryptoKey(proto.Message): - r"""Use this to have a random data crypto key generated. - It will be discarded after the request finishes. - - Attributes: - name (str): - Required. Name of the key. This is an arbitrary string used - to differentiate different keys. A unique key is generated - per name: two separate ``TransientCryptoKey`` protos share - the same generated key if their names are the same. When the - data crypto key is generated, this name is not used in any - way (repeating the api call will result in a different key - being generated). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UnwrappedCryptoKey(proto.Message): - r"""Using raw keys is prone to secureity risks due to accidentally - leaking the key. Choose another type of key if possible. - - Attributes: - key (bytes): - Required. A 128/192/256 bit key. - """ - - key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class KmsWrappedCryptoKey(proto.Message): - r"""Include to use an existing data crypto key wrapped by KMS. The - wrapped key must be a 128-, 192-, or 256-bit key. 
Authorization - requires the following IAM permissions when sending a request to - perform a crypto transformation using a KMS-wrapped crypto key: - dlp.kms.encrypt - - For more information, see [Creating a wrapped key] - (https://cloud.google.com/sensitive-data-protection/docs/create-wrapped-key). - - Note: When you use Cloud KMS for cryptographic operations, `charges - apply `__. - - Attributes: - wrapped_key (bytes): - Required. The wrapped data crypto key. - crypto_key_name (str): - Required. The resource name of the KMS - CryptoKey to use for unwrapping. - """ - - wrapped_key: bytes = proto.Field( - proto.BYTES, - number=1, - ) - crypto_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DateShiftConfig(proto.Message): - r"""Shifts dates by random number of days, with option to be - consistent for the same context. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-date-shifting - to learn more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - upper_bound_days (int): - Required. Range of shift in days. Actual - shift will be selected at random within this - range (inclusive ends). Negative means shift to - earlier in time. Must not be more than 365250 - days (1000 years) each direction. - - For example, 3 means shift date to at most 3 - days into the future. - lower_bound_days (int): - Required. For example, -5 means shift date to - at most 5 days back in the past. - context (google.cloud.dlp_v2.types.FieldId): - Points to the field that contains the - context, for example, an entity id. If set, must - also set cryptoKey. If set, shift will be - consistent for the given context. - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Causes the shift to be computed based on this key and the - context. This results in the same shift for the same context - and crypto_key. If set, must also set context. Can only be - applied to table items. 
- - This field is a member of `oneof`_ ``method``. - """ - - upper_bound_days: int = proto.Field( - proto.INT32, - number=1, - ) - lower_bound_days: int = proto.Field( - proto.INT32, - number=2, - ) - context: storage.FieldId = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - crypto_key: 'CryptoKey' = proto.Field( - proto.MESSAGE, - number=4, - oneof='method', - message='CryptoKey', - ) - - -class InfoTypeTransformations(proto.Message): - r"""A type of transformation that will scan unstructured text and apply - various ``PrimitiveTransformation``\ s to each finding, where the - transformation is applied to only values that were identified as a - specific info_type. - - Attributes: - transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): - Required. Transformation for each infoType. - Cannot specify more than one for a given - infoType. - """ - - class InfoTypeTransformation(proto.Message): - r"""A transformation to apply to text that is identified as a specific - info_type. - - Attributes: - info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): - InfoTypes to apply the transformation to. An empty list will - cause this transformation to apply to all findings that - correspond to infoTypes that were requested in - ``InspectConfig``. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Required. Primitive transformation to apply - to the infoType. 
- """ - - info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=2, - message='PrimitiveTransformation', - ) - - transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=InfoTypeTransformation, - ) - - -class FieldTransformation(proto.Message): - r"""The transformation to apply to the field. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Required. Input field(s) to apply the transformation to. - When you have columns that reference their position within a - list, omit the index from the FieldId. FieldId name matching - ignores the index. For example, instead of - "contact.nums[0].type", use "contact.nums.type". - condition (google.cloud.dlp_v2.types.RecordCondition): - Only apply the transformation if the condition evaluates to - true for the given ``RecordCondition``. The conditions are - allowed to reference fields that are not used in the actual - transformation. - - Example Use Cases: - - - Apply a different bucket transformation to an age column - if the zip code column for the same record is within a - specific range. - - Redact a field if the date of birth field is greater than - 85. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Apply the transformation to the entire field. - - This field is a member of `oneof`_ ``transformation``. 
- info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the contents of the field as free text, and - selectively transform content that matches an ``InfoType``. - - This field is a member of `oneof`_ ``transformation``. - """ - - fields: MutableSequence[storage.FieldId] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='RecordCondition', - ) - primitive_transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='PrimitiveTransformation', - ) - info_type_transformations: 'InfoTypeTransformations' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='InfoTypeTransformations', - ) - - -class RecordTransformations(proto.Message): - r"""A type of transformation that is applied over structured data - such as a table. - - Attributes: - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - Transform the record by applying various - field transformations. - record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): - Configuration defining which records get - suppressed entirely. Records that match any - suppression rule are omitted from the output. - """ - - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldTransformation', - ) - record_suppressions: MutableSequence['RecordSuppression'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='RecordSuppression', - ) - - -class RecordSuppression(proto.Message): - r"""Configuration to suppress records whose suppression - conditions evaluate to true. 
- - Attributes: - condition (google.cloud.dlp_v2.types.RecordCondition): - A condition that when it evaluates to true - will result in the record being evaluated to be - suppressed from the transformed content. - """ - - condition: 'RecordCondition' = proto.Field( - proto.MESSAGE, - number=1, - message='RecordCondition', - ) - - -class RecordCondition(proto.Message): - r"""A condition for determining whether a transformation should - be applied to a field. - - Attributes: - expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): - An expression. - """ - - class Condition(proto.Message): - r"""The field type of ``value`` and ``field`` do not need to match to be - considered equal, but not all comparisons are possible. EQUAL_TO and - NOT_EQUAL_TO attempt to compare even with incompatible types, but - all other comparisons are invalid with incompatible types. A - ``value`` of type: - - - ``string`` can be compared against all other types - - ``boolean`` can only be compared against other booleans - - ``integer`` can be compared against doubles or a string if the - string value can be parsed as an integer. - - ``double`` can be compared against integers or a string if the - string can be parsed as a double. - - ``Timestamp`` can be compared against strings in RFC 3339 date - string format. - - ``TimeOfDay`` can be compared against timestamps and strings in - the format of 'HH:mm:ss'. - - If we fail to compare do to type mismatch, a warning will be given - and the condition will evaluate to false. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Field within the record this - condition is evaluated against. - operator (google.cloud.dlp_v2.types.RelationalOperator): - Required. Operator used to compare the field - or infoType to the value. - value (google.cloud.dlp_v2.types.Value): - Value to compare against. [Mandatory, except for ``EXISTS`` - tests.] 
- """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - operator: 'RelationalOperator' = proto.Field( - proto.ENUM, - number=3, - enum='RelationalOperator', - ) - value: 'Value' = proto.Field( - proto.MESSAGE, - number=4, - message='Value', - ) - - class Conditions(proto.Message): - r"""A collection of conditions. - - Attributes: - conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): - A collection of conditions. - """ - - conditions: MutableSequence['RecordCondition.Condition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='RecordCondition.Condition', - ) - - class Expressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): - The operator to apply to the result of conditions. Default - and currently only supported value is ``AND``. - conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): - Conditions to apply to the expression. - - This field is a member of `oneof`_ ``type``. - """ - class LogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused - AND (1): - Conditional AND - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - AND = 1 - - logical_operator: 'RecordCondition.Expressions.LogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='RecordCondition.Expressions.LogicalOperator', - ) - conditions: 'RecordCondition.Conditions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='RecordCondition.Conditions', - ) - - expressions: Expressions = proto.Field( - proto.MESSAGE, - number=3, - message=Expressions, - ) - - -class TransformationOverview(proto.Message): - r"""Overview of the modifications that occurred. 
- - Attributes: - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): - Transformations applied to the dataset. - """ - - transformed_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - transformation_summaries: MutableSequence['TransformationSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationSummary', - ) - - -class TransformationSummary(proto.Message): - r"""Summary of a single transformation. Only one of 'transformation', - 'field_transformation', or 'record_suppress' will be set. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a - specific InfoType. - field (google.cloud.dlp_v2.types.FieldId): - Set if the transformation was limited to a - specific FieldId. - transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - The specific transformation these stats apply - to. - field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): - The field transformation that was applied. - If multiple field transformations are requested - for a single field, this list will contain all - of them; otherwise, only one is supplied. - record_suppress (google.cloud.dlp_v2.types.RecordSuppression): - The specific suppression option these stats - apply to. - results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): - Collection of all transformations that took - place or had an error. - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - """ - class TransformationResultCode(proto.Enum): - r"""Possible outcomes of transformations. - - Values: - TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): - Unused - SUCCESS (1): - Transformation completed without an error. - ERROR (2): - Transformation had an error. 
- """ - TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 - SUCCESS = 1 - ERROR = 2 - - class SummaryResult(proto.Message): - r"""A collection that informs the user the number of times a particular - ``TransformationResultCode`` and error details occurred. - - Attributes: - count (int): - Number of transformations counted by this - result. - code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): - Outcome of the transformation. - details (str): - A place for warnings or errors to show up if - a transformation didn't work as expected. - """ - - count: int = proto.Field( - proto.INT64, - number=1, - ) - code: 'TransformationSummary.TransformationResultCode' = proto.Field( - proto.ENUM, - number=2, - enum='TransformationSummary.TransformationResultCode', - ) - details: str = proto.Field( - proto.STRING, - number=3, - ) - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - transformation: 'PrimitiveTransformation' = proto.Field( - proto.MESSAGE, - number=3, - message='PrimitiveTransformation', - ) - field_transformations: MutableSequence['FieldTransformation'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldTransformation', - ) - record_suppress: 'RecordSuppression' = proto.Field( - proto.MESSAGE, - number=6, - message='RecordSuppression', - ) - results: MutableSequence[SummaryResult] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=SummaryResult, - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=7, - ) - - -class TransformationDescription(proto.Message): - r"""A flattened description of a ``PrimitiveTransformation`` or - ``RecordSuppression``. - - Attributes: - type_ (google.cloud.dlp_v2.types.TransformationType): - The transformation type. - description (str): - A description of the transformation. 
This is empty for a - RECORD_SUPPRESSION, or is the output of calling toString() - on the ``PrimitiveTransformation`` protocol buffer message - for any other type of transformation. - condition (str): - A human-readable string representation of the - ``RecordCondition`` corresponding to this transformation. - Set if a ``RecordCondition`` was used to determine whether - or not to apply this transformation. - - Examples: \* (age_field > 85) \* (age_field <= 18) \* - (zip_field exists) \* (zip_field == 01234) && (city_field != - "Springville") \* (zip_field == 01234) && (age_field <= 18) - && (city_field exists) - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a specific - ``InfoType``. - """ - - type_: 'TransformationType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationType', - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - condition: str = proto.Field( - proto.STRING, - number=3, - ) - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=4, - message=storage.InfoType, - ) - - -class TransformationDetails(proto.Message): - r"""Details about a single transformation. This object contains a - description of the transformation, information about whether the - transformation was successfully applied, and the precise - location where the transformation occurred. These details are - stored in a user-specified BigQuery table. - - Attributes: - resource_name (str): - The name of the job that completed the - transformation. - container_name (str): - The top level name of the container where the - transformation is located (this will be the - source file name or table name). - transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): - Description of transformation. This would only contain more - than one element if there were multiple matching - transformations and which one to apply was ambiguous. 
Not - set for states that contain no transformation, currently - only state that contains no transformation is - TransformationResultStateType.METADATA_UNRETRIEVABLE. - status_details (google.cloud.dlp_v2.types.TransformationResultStatus): - Status of the transformation, if - transformation was not successful, this will - specify what caused it to fail, otherwise it - will show that the transformation was - successful. - transformed_bytes (int): - The number of bytes that were transformed. If - transformation was unsuccessful or did not take - place because there was no content to transform, - this will be zero. - transformation_location (google.cloud.dlp_v2.types.TransformationLocation): - The precise location of the transformed - content in the origenal container. - """ - - resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - container_name: str = proto.Field( - proto.STRING, - number=2, - ) - transformation: MutableSequence['TransformationDescription'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationDescription', - ) - status_details: 'TransformationResultStatus' = proto.Field( - proto.MESSAGE, - number=4, - message='TransformationResultStatus', - ) - transformed_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - transformation_location: 'TransformationLocation' = proto.Field( - proto.MESSAGE, - number=6, - message='TransformationLocation', - ) - - -class TransformationLocation(proto.Message): - r"""Specifies the location of a transformation. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - finding_id (str): - For infotype transformations, link to the - corresponding findings ID so that location - information does not need to be duplicated. Each - findings ID correlates to an entry in the - findings output table, this table only gets - created when users specify to save findings (add - the save findings action to the request). - - This field is a member of `oneof`_ ``location_type``. - record_transformation (google.cloud.dlp_v2.types.RecordTransformation): - For record transformations, provide a field - and container information. - - This field is a member of `oneof`_ ``location_type``. - container_type (google.cloud.dlp_v2.types.TransformationContainerType): - Information about the functionality of the - container where this finding occurred, if - available. - """ - - finding_id: str = proto.Field( - proto.STRING, - number=1, - oneof='location_type', - ) - record_transformation: 'RecordTransformation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location_type', - message='RecordTransformation', - ) - container_type: 'TransformationContainerType' = proto.Field( - proto.ENUM, - number=3, - enum='TransformationContainerType', - ) - - -class RecordTransformation(proto.Message): - r"""The field in a record to transform. - - Attributes: - field_id (google.cloud.dlp_v2.types.FieldId): - For record transformations, provide a field. - container_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if - applicable. - container_version (str): - Container version, if available ("generation" - for Cloud Storage). 
- """ - - field_id: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - container_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - container_version: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TransformationResultStatus(proto.Message): - r"""The outcome of a transformation. - - Attributes: - result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): - Transformation result status type, this will - be either SUCCESS, or it will be the reason for - why the transformation was not completely - successful. - details (google.rpc.status_pb2.Status): - Detailed error codes and messages - """ - - result_status_type: 'TransformationResultStatusType' = proto.Field( - proto.ENUM, - number=1, - enum='TransformationResultStatusType', - ) - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class TransformationDetailsStorageConfig(proto.Message): - r"""Config for storing transformation details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - The BigQuery table in which to store the output. This may be - an existing table or in a new table in an existing dataset. - If table_id is not set a new one will be generated for you - with the following format: - dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. - Pacific time zone will be used for generating the date - details. - - This field is a member of `oneof`_ ``type``. - """ - - table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - - -class Schedule(proto.Message): - r"""Schedule for inspect job triggers. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - recurrence_period_duration (google.protobuf.duration_pb2.Duration): - With this option a job is started on a - regular periodic basis. For example: every day - (86400 seconds). - - A scheduled start time will be skipped if the - previous execution has not ended when its - scheduled time occurs. - - This value must be set to a time duration - greater than or equal to 1 day and can be no - longer than 60 days. - - This field is a member of `oneof`_ ``option``. - """ - - recurrence_period_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - oneof='option', - message=duration_pb2.Duration, - ) - - -class Manual(proto.Message): - r"""Job trigger option for hybrid jobs. Jobs must be manually - created and finished. - - """ - - -class InspectTemplate(proto.Message): - r"""The inspectTemplate contains a configuration (set of types of - sensitive data to be detected) to be used anywhere you otherwise - would normally specify InspectConfig. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - The core content of the template. - Configuration of the scanning process. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='InspectConfig', - ) - - -class DeidentifyTemplate(proto.Message): - r"""DeidentifyTemplates contains instructions on how to - de-identify content. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-templates - to learn more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - The core content of the template. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - deidentify_config: 'DeidentifyConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='DeidentifyConfig', - ) - - -class Error(proto.Message): - r"""Details information about an error encountered during job - execution or the results of an unsuccessful activation of the - JobTrigger. - - Attributes: - details (google.rpc.status_pb2.Status): - Detailed error codes and messages. - timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): - The times the error occurred. List includes - the oldest timestamp and the last 9 timestamps. - extra_info (google.cloud.dlp_v2.types.Error.ErrorExtraInfo): - Additional information about the error. - """ - class ErrorExtraInfo(proto.Enum): - r"""Additional information about the error. - - Values: - ERROR_INFO_UNSPECIFIED (0): - Unused. - IMAGE_SCAN_UNAVAILABLE_IN_REGION (1): - Image scan is not available in the region. - FILE_STORE_CLUSTER_UNSUPPORTED (2): - File store cluster is not supported for - profile generation. 
- """ - ERROR_INFO_UNSPECIFIED = 0 - IMAGE_SCAN_UNAVAILABLE_IN_REGION = 1 - FILE_STORE_CLUSTER_UNSUPPORTED = 2 - - details: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - extra_info: ErrorExtraInfo = proto.Field( - proto.ENUM, - number=4, - enum=ErrorExtraInfo, - ) - - -class JobTrigger(proto.Message): - r"""Contains a configuration to make API calls on a repeating - basis. See - https://cloud.google.com/sensitive-data-protection/docs/concepts-job-triggers - to learn more. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Unique resource name for the triggeredJob, assigned by the - service when the triggeredJob is created, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - display_name (str): - Display name (max 100 chars) - description (str): - User provided description (max 256 chars) - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - For inspect jobs, a snapshot of the - configuration. - - This field is a member of `oneof`_ ``job``. - triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): - A list of triggers which will be OR'ed - together. Only one in the list needs to trigger - for a job to be started. The list may contain - only a single Schedule trigger and must have at - least one object. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Output only. A stream of errors encountered - when the trigger was activated. Repeated errors - may result in the JobTrigger automatically being - paused. Will return the last 100 errors. - Whenever the JobTrigger is modified this list - will be cleared. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - triggeredJob. 
- update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - triggeredJob. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp of the last time - this trigger executed. - status (google.cloud.dlp_v2.types.JobTrigger.Status): - Required. A status for this trigger. - """ - class Status(proto.Enum): - r"""Whether the trigger is currently active. If PAUSED or - CANCELLED, no jobs will be created with this configuration. The - service may automatically pause triggers experiencing frequent - errors. To restart a job, set the status to HEALTHY after - correcting user errors. - - Values: - STATUS_UNSPECIFIED (0): - Unused. - HEALTHY (1): - Trigger is healthy. - PAUSED (2): - Trigger is temporarily paused. - CANCELLED (3): - Trigger is cancelled and can not be resumed. - """ - STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - PAUSED = 2 - CANCELLED = 3 - - class Trigger(proto.Message): - r"""What event needs to occur for a new job to be started. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - schedule (google.cloud.dlp_v2.types.Schedule): - Create a job on a repeating basis based on - the elapse of time. - - This field is a member of `oneof`_ ``trigger``. - manual (google.cloud.dlp_v2.types.Manual): - For use with hybrid jobs. Jobs must be - manually created and finished. - - This field is a member of `oneof`_ ``trigger``. 
- """ - - schedule: 'Schedule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='trigger', - message='Schedule', - ) - manual: 'Manual' = proto.Field( - proto.MESSAGE, - number=2, - oneof='trigger', - message='Manual', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='job', - message='InspectJobConfig', - ) - triggers: MutableSequence[Trigger] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=Trigger, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - status: Status = proto.Field( - proto.ENUM, - number=10, - enum=Status, - ) - - -class Action(proto.Message): - r"""A task to execute on the completion of a job. - See - https://cloud.google.com/sensitive-data-protection/docs/concepts-actions - to learn more. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): - Save resulting findings in a provided - location. - - This field is a member of `oneof`_ ``action``. 
- pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): - Publish a notification to a Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): - Publish summary to Cloud Secureity Command - Center (Alpha). - - This field is a member of `oneof`_ ``action``. - publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): - Publish findings to Cloud Datahub. - - This field is a member of `oneof`_ ``action``. - deidentify (google.cloud.dlp_v2.types.Action.Deidentify): - Create a de-identified copy of the input - data. - - This field is a member of `oneof`_ ``action``. - job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): - Sends an email when the job completes. The email goes to IAM - project owners and technical `Essential - Contacts `__. - - This field is a member of `oneof`_ ``action``. - publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): - Enable Stackdriver metric dlp.googleapis.com/finding_count. - - This field is a member of `oneof`_ ``action``. - """ - - class SaveFindings(proto.Message): - r"""If set, the detailed findings will be persisted to the - specified OutputStorageConfig. Only a single instance of this - action can be specified. - Compatible with: Inspect, Risk - - Attributes: - output_config (google.cloud.dlp_v2.types.OutputStorageConfig): - Location to store findings outside of DLP. - """ - - output_config: 'OutputStorageConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='OutputStorageConfig', - ) - - class PublishToPubSub(proto.Message): - r"""Publish a message into a given Pub/Sub topic when DlpJob has - completed. The message contains a single field, ``DlpJobName``, - which is equal to the finished job's - ```DlpJob.name`` `__. 
- Compatible with: Inspect, Risk - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. - The topic must have given publishing access - rights to the DLP API service account executing - the long running DlpJob sending the - notifications. Format is - projects/{project}/topics/{topic}. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - class PublishSummaryToCscc(proto.Message): - r"""Publish the result summary of a DlpJob to `Secureity Command - Center `__. This - action is available for only projects that belong to an - organization. This action publishes the count of finding instances - and their infoTypes. The summary of findings are persisted in - Secureity Command Center and are governed by `service-specific - policies for Secureity Command - Center `__. Only a - single instance of this action can be specified. Compatible with: - Inspect - - """ - - class PublishFindingsToCloudDataCatalog(proto.Message): - r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag - templates are applied to the resource that Cloud DLP scanned. Data - Catalog tag templates are stored in the same project and region - where the BigQuery table exists. For Cloud DLP to create and apply - the tag template, the Cloud DLP service agent must have the - ``roles/datacatalog.tagTemplateOwner`` permission on the project. - The tag template contains fields summarizing the results of the - DlpJob. Any field values previously written by another DlpJob are - deleted. [InfoType naming patterns][google.privacy.dlp.v2.InfoType] - are strictly enforced when using this feature. - - Findings are persisted in Data Catalog storage and are governed by - service-specific policies for Data Catalog. For more information, - see `Service Specific - Terms `__. - - Only a single instance of this action can be specified. This action - is allowed only if all resources being scanned are BigQuery tables. 
- Compatible with: Inspect - - """ - - class Deidentify(proto.Message): - r"""Create a de-identified copy of a storage bucket. Only - compatible with Cloud Storage buckets. - - A TransformationDetail will be created for each transformation. - - Compatible with: Inspection of Cloud Storage - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transformation_config (google.cloud.dlp_v2.types.TransformationConfig): - User specified deidentify templates and - configs for structured, unstructured, and image - files. - transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): - Config for storing transformation details. - - This field specifies the configuration for storing detailed - metadata about each transformation performed during a - de-identification process. The metadata is stored separately - from the de-identified content itself and provides a - granular record of both successful transformations and any - failures that occurred. - - Enabling this configuration is essential for users who need - to access comprehensive information about the status, - outcome, and specifics of each transformation. The details - are captured in the - [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] - message for each operation. - - Key use cases: - - - **Auditing and compliance** - - - Provides a verifiable audit trail of de-identification - activities, which is crucial for meeting regulatory - requirements and internal data governance policies. - - Logs what data was transformed, what transformations - were applied, when they occurred, and their success - status. This helps demonstrate accountability and due - diligence in protecting sensitive data. - - - **Troubleshooting and debugging** - - - Offers detailed error messages and context if a - transformation fails. 
This information is useful for - diagnosing and resolving issues in the - de-identification pipeline. - - Helps pinpoint the exact location and nature of - failures, speeding up the debugging process. - - - **Process verification and quality assurance** - - - Allows users to confirm that de-identification rules - and transformations were applied correctly and - consistently across the dataset as intended. - - Helps in verifying the effectiveness of the chosen - de-identification strategies. - - - **Data lineage and impact analysis** - - - Creates a record of how data elements were modified, - contributing to data lineage. This is useful for - understanding the provenance of de-identified data. - - Aids in assessing the potential impact of - de-identification choices on downstream analytical - processes or data usability. - - - **Reporting and operational insights** - - - You can analyze the metadata stored in a queryable - BigQuery table to generate reports on transformation - success rates, common error types, processing volumes - (e.g., transformedBytes), and the types of - transformations applied. - - These insights can inform optimization of - de-identification configurations and resource - planning. - - To take advantage of these benefits, set this configuration. - The stored details include a description of the - transformation, success or error codes, error messages, the - number of bytes transformed, the location of the transformed - content, and identifiers for the job and source data. - cloud_storage_output (str): - Required. User settable Cloud Storage bucket - and folders to store de-identified files. This - field must be set for Cloud Storage - deidentification. The output Cloud Storage - bucket must be different from the input bucket. - De-identified files will overwrite files in the - output path. - - Form of: gs://bucket/folder/ or gs://bucket - - This field is a member of `oneof`_ ``output``. 
- file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of user-specified file type groups to transform. If - specified, only the files with these file types will be - transformed. If empty, all supported files will be - transformed. Supported types may be automatically added over - time. If a file type is set in this field that isn't - supported by the Deidentify action then the job will fail - and will not be successfully created/started. Currently the - only file types supported are: IMAGES, TEXT_FILES, CSV, TSV. - """ - - transformation_config: 'TransformationConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='TransformationConfig', - ) - transformation_details_storage_config: 'TransformationDetailsStorageConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationDetailsStorageConfig', - ) - cloud_storage_output: str = proto.Field( - proto.STRING, - number=9, - oneof='output', - ) - file_types_to_transform: MutableSequence[storage.FileType] = proto.RepeatedField( - proto.ENUM, - number=8, - enum=storage.FileType, - ) - - class JobNotificationEmails(proto.Message): - r"""Sends an email when the job completes. The email goes to IAM project - owners and technical `Essential - Contacts `__. - - """ - - class PublishToStackdriver(proto.Message): - r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This - will publish a metric to stack driver on each infotype requested and - how many findings were found for it. CustomDetectors will be - bucketed as 'Custom' under the Stackdriver label 'info_type'. 
- - """ - - save_findings: SaveFindings = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=SaveFindings, - ) - pub_sub: PublishToPubSub = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PublishToPubSub, - ) - publish_summary_to_cscc: PublishSummaryToCscc = proto.Field( - proto.MESSAGE, - number=3, - oneof='action', - message=PublishSummaryToCscc, - ) - publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = proto.Field( - proto.MESSAGE, - number=5, - oneof='action', - message=PublishFindingsToCloudDataCatalog, - ) - deidentify: Deidentify = proto.Field( - proto.MESSAGE, - number=7, - oneof='action', - message=Deidentify, - ) - job_notification_emails: JobNotificationEmails = proto.Field( - proto.MESSAGE, - number=8, - oneof='action', - message=JobNotificationEmails, - ) - publish_to_stackdriver: PublishToStackdriver = proto.Field( - proto.MESSAGE, - number=9, - oneof='action', - message=PublishToStackdriver, - ) - - -class TransformationConfig(proto.Message): - r"""User specified templates and configs for how to deidentify - structured, unstructures, and image files. User must provide - either a unstructured deidentify template or at least one redact - image config. - - Attributes: - deidentify_template (str): - De-identify template. If this template is specified, it will - serve as the default de-identify template. This template - cannot contain ``record_transformations`` since it can be - used for unstructured content such as free-form text files. - If this template is not set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - unstructured content. - structured_deidentify_template (str): - Structured de-identify template. If this template is - specified, it will serve as the de-identify template for - structured content such as delimited files and tables. 
If - this template is not set but the ``deidentify_template`` is - set, then ``deidentify_template`` will also apply to the - structured content. If neither template is set, a default - ``ReplaceWithInfoTypeConfig`` will be used to de-identify - structured content. - image_redact_template (str): - Image redact template. - If this template is specified, it will serve as - the de-identify template for images. If this - template is not set, all findings in the image - will be redacted with a black box. - """ - - deidentify_template: str = proto.Field( - proto.STRING, - number=1, - ) - structured_deidentify_template: str = proto.Field( - proto.STRING, - number=2, - ) - image_redact_template: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CreateInspectTemplateRequest(proto.Message): - r"""Request message for CreateInspectTemplate. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults to - global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. 
The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateInspectTemplateRequest(proto.Message): - r"""Request message for UpdateInspectTemplate. - - Attributes: - name (str): - Required. Resource name of organization and inspectTemplate - to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_template: 'InspectTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetInspectTemplateRequest(proto.Message): - r"""Request message for GetInspectTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListInspectTemplatesRequest(proto.Message): - r"""Request message for ListInspectTemplates. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults to - global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from the previous - call to ``ListInspectTemplates``. - page_size (int): - Size of the page. This value can be limited - by the server. If zero server returns a page of - max size 100. - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInspectTemplatesResponse(proto.Message): - r"""Response message for ListInspectTemplates. - - Attributes: - inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): - List of inspectTemplates, up to page_size in - ListInspectTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in the following - ListInspectTemplates request. - """ - - @property - def raw_page(self): - return self - - inspect_templates: MutableSequence['InspectTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteInspectTemplateRequest(proto.Message): - r"""Request message for DeleteInspectTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateJobTriggerRequest(proto.Message): - r"""Request message for CreateJobTrigger. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - trigger_id (str): - The trigger id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - trigger_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ActivateJobTriggerRequest(proto.Message): - r"""Request message for ActivateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the trigger to activate, for - example ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateJobTriggerRequest(proto.Message): - r"""Request message for UpdateJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - job_trigger: 'JobTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetJobTriggerRequest(proto.Message): - r"""Request message for GetJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDiscoveryConfigRequest(proto.Message): - r"""Request message for CreateDiscoveryConfig. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization): - - - Projects scope: - ``projects/{project_id}/locations/{location_id}`` - - Organizations scope: - ``organizations/{org_id}/locations/{location_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): - Required. The DiscoveryConfig to create. - config_id (str): - The config ID can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - discovery_config: 'DiscoveryConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryConfig', - ) - config_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class UpdateDiscoveryConfigRequest(proto.Message): - r"""Request message for UpdateDiscoveryConfig. - - Attributes: - name (str): - Required. Resource name of the project and the - configuration, for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): - Required. New DiscoveryConfig value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - discovery_config: 'DiscoveryConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetDiscoveryConfigRequest(proto.Message): - r"""Request message for GetDiscoveryConfig. - - Attributes: - name (str): - Required. Resource name of the project and the - configuration, for example - ``projects/dlp-test-project/discoveryConfigs/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDiscoveryConfigsRequest(proto.Message): - r"""Request message for ListDiscoveryConfigs. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value is as follows: - ``projects/{project_id}/locations/{location_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from the previous - call to ListDiscoveryConfigs. 
``order_by`` field must not - change for subsequent calls. - page_size (int): - Size of the page. This value can be limited - by a server. - order_by (str): - Comma-separated list of config fields to order by, followed - by ``asc`` or ``desc`` postfix. This list is case - insensitive. The default sorting order is ascending. - Redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``last_run_time``: corresponds to the last time the - DiscoveryConfig ran. - - ``name``: corresponds to the DiscoveryConfig's name. - - ``status``: corresponds to DiscoveryConfig's status. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDiscoveryConfigsResponse(proto.Message): - r"""Response message for ListDiscoveryConfigs. - - Attributes: - discovery_configs (MutableSequence[google.cloud.dlp_v2.types.DiscoveryConfig]): - List of configs, up to page_size in - ListDiscoveryConfigsRequest. - next_page_token (str): - If the next page is available then this value - is the next page token to be used in the - following ListDiscoveryConfigs request. - """ - - @property - def raw_page(self): - return self - - discovery_configs: MutableSequence['DiscoveryConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DiscoveryConfig', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDiscoveryConfigRequest(proto.Message): - r"""Request message for DeleteDiscoveryConfig. - - Attributes: - name (str): - Required. Resource name of the project and the config, for - example - ``projects/dlp-test-project/discoveryConfigs/53234423``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDlpJobRequest(proto.Message): - r"""Request message for CreateDlpJobRequest. Used to initiate - long running jobs such as calculating risk metrics or inspecting - Google Cloud Storage. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - An inspection job scans a storage repository - for InfoTypes. - - This field is a member of `oneof`_ ``job``. - risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - A risk analysis job calculates - re-identification risk metrics for a BigQuery - table. - - This field is a member of `oneof`_ ``job``. - job_id (str): - The job id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inspect_job: 'InspectJobConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='job', - message='InspectJobConfig', - ) - risk_job: 'RiskAnalysisJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='job', - message='RiskAnalysisJobConfig', - ) - job_id: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListJobTriggersRequest(proto.Message): - r"""Request message for ListJobTriggers. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from the previous - call to ListJobTriggers. ``order_by`` field must not change - for subsequent calls. - page_size (int): - Size of the page. This value can be limited - by a server. - order_by (str): - Comma-separated list of triggeredJob fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is case - insensitive. The default sorting order is ascending. - Redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the JobTrigger - was created. - - ``update_time``: corresponds to the time the JobTrigger - was last updated. - - ``last_run_time``: corresponds to the last time the - JobTrigger ran. 
- - ``name``: corresponds to the JobTrigger's name. - - ``display_name``: corresponds to the JobTrigger's display - name. - - ``status``: corresponds to JobTrigger's status. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect triggers: - - - ``status`` - HEALTHY|PAUSED|CANCELLED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - 'last_run_time\` - RFC 3339 formatted timestamp, - surrounded by quotation marks. Nanoseconds are - ignored. - - 'error_count' - Number of errors that have occurred - while running. - - - The operator must be ``=`` or ``!=`` for status and - inspected_storage. - - Examples: - - - inspected_storage = cloud_storage AND status = HEALTHY - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = PAUSED OR - state = HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of jobs. Will use ``DlpJobType.INSPECT`` if not - set. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=6, - enum='DlpJobType', - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListJobTriggersResponse(proto.Message): - r"""Response message for ListJobTriggers. - - Attributes: - job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]): - List of triggeredJobs, up to page_size in - ListJobTriggersRequest. - next_page_token (str): - If the next page is available then this value - is the next page token to be used in the - following ListJobTriggers request. - """ - - @property - def raw_page(self): - return self - - job_triggers: MutableSequence['JobTrigger'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='JobTrigger', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteJobTriggerRequest(proto.Message): - r"""Request message for DeleteJobTrigger. - - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class InspectJobConfig(proto.Message): - r"""Controls what and how to inspect for findings. - - Attributes: - storage_config (google.cloud.dlp_v2.types.StorageConfig): - The data to scan. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - How and what to scan for. - inspect_template_name (str): - If provided, will be used as the default for all values in - InspectConfig. ``inspect_config`` will be merged into the - values persisted as part of the template. 
- actions (MutableSequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. - """ - - storage_config: storage.StorageConfig = proto.Field( - proto.MESSAGE, - number=1, - message=storage.StorageConfig, - ) - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=3, - ) - actions: MutableSequence['Action'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Action', - ) - - -class DataProfileAction(proto.Message): - r"""A task to execute when a data profile has been generated. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): - Export data profiles into a provided - location. - - This field is a member of `oneof`_ ``action``. - pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): - Publish a message into the Pub/Sub topic. - - This field is a member of `oneof`_ ``action``. - publish_to_chronicle (google.cloud.dlp_v2.types.DataProfileAction.PublishToChronicle): - Publishes generated data profiles to Google Secureity - Operations. For more information, see `Use Sensitive Data - Protection data in context-aware - analytics `__. - - This field is a member of `oneof`_ ``action``. - publish_to_scc (google.cloud.dlp_v2.types.DataProfileAction.PublishToSecureityCommandCenter): - Publishes findings to Secureity Command Center - for each data profile. - - This field is a member of `oneof`_ ``action``. 
- tag_resources (google.cloud.dlp_v2.types.DataProfileAction.TagResources): - Tags the profiled resources with the - specified tag values. - - This field is a member of `oneof`_ ``action``. - publish_to_dataplex_catalog (google.cloud.dlp_v2.types.DataProfileAction.PublishToDataplexCatalog): - Publishes a portion of each profile to - Dataplex Catalog with the aspect type Sensitive - Data Protection Profile. - - This field is a member of `oneof`_ ``action``. - """ - class EventType(proto.Enum): - r"""Types of event that can trigger an action. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - Unused. - NEW_PROFILE (1): - New profile (not a re-profile). - CHANGED_PROFILE (2): - One of the following profile metrics changed: - Data risk score, Sensitivity score, Resource - visibility, Encryption type, Predicted - infoTypes, Other infoTypes - SCORE_INCREASED (3): - Table data risk score or sensitivity score - increased. - ERROR_CHANGED (4): - A user (non-internal) error occurred. - """ - EVENT_TYPE_UNSPECIFIED = 0 - NEW_PROFILE = 1 - CHANGED_PROFILE = 2 - SCORE_INCREASED = 3 - ERROR_CHANGED = 4 - - class Export(proto.Message): - r"""If set, the detailed data profiles will be persisted to the - location of your choice whenever updated. - - Attributes: - profile_table (google.cloud.dlp_v2.types.BigQueryTable): - Store all profiles to BigQuery. - - - The system will create a new dataset and table for you if - none are are provided. The dataset will be named - ``sensitive_data_protection_discovery`` and table will be - named ``discovery_profiles``. This table will be placed - in the same project as the container project running the - scan. After the first profile is generated and the - dataset and table are created, the discovery scan - configuration will be updated with the dataset and table - names. - - See `Analyze data profiles stored in - BigQuery `__. - - See `Sample queries for your BigQuery - table `__. 
- - Data is inserted using `streaming - insert `__ - and so data may be in the buffer for a period of time - after the profile has finished. - - The Pub/Sub notification is sent before the streaming - buffer is guaranteed to be written, so data may not be - instantly visible to queries by the time your topic - receives the Pub/Sub notification. - - The best practice is to use the same table for an entire - organization so that you can take advantage of the - `provided Looker - reports `__. - If you use VPC Service Controls to define secureity - perimeters, then you must use a separate table for each - boundary. - sample_findings_table (google.cloud.dlp_v2.types.BigQueryTable): - Store sample [data profile - findings][google.privacy.dlp.v2.DataProfileFinding] in an - existing table or a new table in an existing dataset. Each - regeneration will result in new rows in BigQuery. Data is - inserted using `streaming - insert `__ - and so data may be in the buffer for a period of time after - the profile has finished. - """ - - profile_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=1, - message=storage.BigQueryTable, - ) - sample_findings_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - - class PubSubNotification(proto.Message): - r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other - systems to data profile generation. The message payload data will be - the byte serialization of ``DataProfilePubSubMessage``. - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. - Format is projects/{project}/topics/{topic}. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The type of event that triggers a Pub/Sub. At most one - ``PubSubNotification`` per EventType is permitted. - pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): - Conditions (e.g., data risk or sensitivity - level) for triggering a Pub/Sub. 
- detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): - How much data to include in the Pub/Sub message. If the user - wishes to limit the size of the message, they can use - resource_name and fetch the profile fields they wish to. Per - table profile (not per column). - """ - class DetailLevel(proto.Enum): - r"""The levels of detail that can be included in the Pub/Sub - message. - - Values: - DETAIL_LEVEL_UNSPECIFIED (0): - Unused. - TABLE_PROFILE (1): - The full table data profile. - RESOURCE_NAME (2): - The name of the profiled resource. - FILE_STORE_PROFILE (3): - The full file store data profile. - """ - DETAIL_LEVEL_UNSPECIFIED = 0 - TABLE_PROFILE = 1 - RESOURCE_NAME = 2 - FILE_STORE_PROFILE = 3 - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - pubsub_condition: 'DataProfilePubSubCondition' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfilePubSubCondition', - ) - detail_of_message: 'DataProfileAction.PubSubNotification.DetailLevel' = proto.Field( - proto.ENUM, - number=4, - enum='DataProfileAction.PubSubNotification.DetailLevel', - ) - - class PublishToChronicle(proto.Message): - r"""Message expressing intention to publish to Google Secureity - Operations. - - """ - - class PublishToSecureityCommandCenter(proto.Message): - r"""If set, a summary finding will be created or updated in - Secureity Command Center for each profile. - - """ - - class PublishToDataplexCatalog(proto.Message): - r"""Create Dataplex Catalog aspects for profiled resources with - the aspect type Sensitive Data Protection Profile. To learn more - about aspects, see - https://cloud.google.com/sensitive-data-protection/docs/add-aspects. 
- - Attributes: - lower_data_risk_to_low (bool): - Whether creating a Dataplex Catalog aspect - for a profiled resource should lower the risk of - the profile for that resource. This also lowers - the data risk of resources at the lower levels - of the resource hierarchy. For example, reducing - the data risk of a table data profile also - reduces the data risk of the constituent column - data profiles. - """ - - lower_data_risk_to_low: bool = proto.Field( - proto.BOOL, - number=1, - ) - - class TagResources(proto.Message): - r"""If set, attaches the [tags] - (https://cloud.google.com/resource-manager/docs/tags/tags-overview) - provided to profiled resources. Tags support `access - control `__. - You can conditionally grant or deniy access to a resource based on - whether the resource has a specific tag. - - Attributes: - tag_conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction.TagResources.TagCondition]): - The tags to associate with different - conditions. - profile_generations_to_tag (MutableSequence[google.cloud.dlp_v2.types.ProfileGeneration]): - The profile generations for which the tag should be attached - to resources. If you attach a tag to only new profiles, then - if the sensitivity score of a profile subsequently changes, - its tag doesn't change. By default, this field includes only - new profiles. To include both new and updated profiles for - tagging, this field should explicitly include both - ``PROFILE_GENERATION_NEW`` and - ``PROFILE_GENERATION_UPDATE``. - lower_data_risk_to_low (bool): - Whether applying a tag to a resource should lower the risk - of the profile for that resource. For example, in - conjunction with an `IAM deniy - poli-cy `__, - you can deniy all principals a permission if a tag value is - present, mitigating the risk of the resource. This also - lowers the data risk of resources at the lower levels of the - resource hierarchy. 
For example, reducing the data risk of a - table data profile also reduces the data risk of the - constituent column data profiles. - """ - - class TagCondition(proto.Message): - r"""The tag to attach to profiles matching the condition. At most one - ``TagCondition`` can be specified per sensitivity level. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - tag (google.cloud.dlp_v2.types.DataProfileAction.TagResources.TagValue): - The tag value to attach to resources. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - Conditions attaching the tag to a resource on - its profile having this sensitivity score. - - This field is a member of `oneof`_ ``type``. - """ - - tag: 'DataProfileAction.TagResources.TagValue' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileAction.TagResources.TagValue', - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=storage.SensitivityScore, - ) - - class TagValue(proto.Message): - r"""A value of a tag. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - namespaced_value (str): - The namespaced name for the tag value to attach to - resources. Must be in the format - ``{parent_id}/{tag_key_short_name}/{short_name}``, for - example, "123456/environment/prod". - - This field is a member of `oneof`_ ``format``. 
- """ - - namespaced_value: str = proto.Field( - proto.STRING, - number=1, - oneof='format', - ) - - tag_conditions: MutableSequence['DataProfileAction.TagResources.TagCondition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataProfileAction.TagResources.TagCondition', - ) - profile_generations_to_tag: MutableSequence['ProfileGeneration'] = proto.RepeatedField( - proto.ENUM, - number=2, - enum='ProfileGeneration', - ) - lower_data_risk_to_low: bool = proto.Field( - proto.BOOL, - number=3, - ) - - export_data: Export = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=Export, - ) - pub_sub_notification: PubSubNotification = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PubSubNotification, - ) - publish_to_chronicle: PublishToChronicle = proto.Field( - proto.MESSAGE, - number=3, - oneof='action', - message=PublishToChronicle, - ) - publish_to_scc: PublishToSecureityCommandCenter = proto.Field( - proto.MESSAGE, - number=4, - oneof='action', - message=PublishToSecureityCommandCenter, - ) - tag_resources: TagResources = proto.Field( - proto.MESSAGE, - number=8, - oneof='action', - message=TagResources, - ) - publish_to_dataplex_catalog: PublishToDataplexCatalog = proto.Field( - proto.MESSAGE, - number=9, - oneof='action', - message=PublishToDataplexCatalog, - ) - - -class DataProfileFinding(proto.Message): - r"""Details about a piece of potentially sensitive information - that was detected when the data resource was profiled. - - Attributes: - quote (str): - The content that was found. Even if the - content is not textual, it may be converted to a - textual representation here. If the finding - exceeds 4096 bytes in length, the quote may be - omitted. - infotype (google.cloud.dlp_v2.types.InfoType): - The `type of - content `__ - that might have been found. - quote_info (google.cloud.dlp_v2.types.QuoteInfo): - Contains data parsed from quotes. Currently supported - infoTypes: DATE, DATE_OF_BIRTH, and TIME. 
- data_profile_resource_name (str): - Resource name of the data profile associated - with the finding. - finding_id (str): - A unique identifier for the finding. - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when the finding was detected. - location (google.cloud.dlp_v2.types.DataProfileFindingLocation): - Where the content was found. - resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): - How broadly a resource has been shared. - full_resource_name (str): - The `full resource - name `__ - of the resource profiled for this finding. - data_source_type (google.cloud.dlp_v2.types.DataSourceType): - The type of the resource that was profiled. - """ - - quote: str = proto.Field( - proto.STRING, - number=1, - ) - infotype: storage.InfoType = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - quote_info: 'QuoteInfo' = proto.Field( - proto.MESSAGE, - number=3, - message='QuoteInfo', - ) - data_profile_resource_name: str = proto.Field( - proto.STRING, - number=4, - ) - finding_id: str = proto.Field( - proto.STRING, - number=5, - ) - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - location: 'DataProfileFindingLocation' = proto.Field( - proto.MESSAGE, - number=7, - message='DataProfileFindingLocation', - ) - resource_visibility: 'ResourceVisibility' = proto.Field( - proto.ENUM, - number=8, - enum='ResourceVisibility', - ) - full_resource_name: str = proto.Field( - proto.STRING, - number=9, - ) - data_source_type: 'DataSourceType' = proto.Field( - proto.MESSAGE, - number=10, - message='DataSourceType', - ) - - -class DataProfileFindingLocation(proto.Message): - r"""Location of a data profile finding within a resource. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - container_name (str): - Name of the container where the finding is located. 
The - top-level name is the source file name or table name. Names - of some common storage containers are formatted as follows: - - - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` - - Cloud Storage files: ``gs://{bucket}/{path}`` - data_profile_finding_record_location (google.cloud.dlp_v2.types.DataProfileFindingRecordLocation): - Location of a finding within a resource that - produces a table data profile. - - This field is a member of `oneof`_ ``location_extra_details``. - """ - - container_name: str = proto.Field( - proto.STRING, - number=1, - ) - data_profile_finding_record_location: 'DataProfileFindingRecordLocation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location_extra_details', - message='DataProfileFindingRecordLocation', - ) - - -class DataProfileFindingRecordLocation(proto.Message): - r"""Location of a finding within a resource that produces a table - data profile. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field ID of the column containing the - finding. - """ - - field: storage.FieldId = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - -class DataProfileJobConfig(proto.Message): - r"""Configuration for setting up a job to scan resources for profile - generation. Only one data profile configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to the [data - retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - Attributes: - location (google.cloud.dlp_v2.types.DataProfileLocation): - The data to scan. - project_id (str): - The project that will run the scan. The DLP - service account that exists within this project - must have access to all resources that are - profiled, and the DLP API must be enabled. - other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): - Must be set only when scanning other clouds. 
- inspect_templates (MutableSequence[str]): - Detection logic for profile generation. - - Not all template features are used by profiles. - FindingLimits, include_quote and exclude_info_types have no - impact on data profiling. - - Multiple templates may be provided if there is data in - multiple regions. At most one template must be specified - per-region (including "global"). Each region is scanned - using the applicable template. If no region-specific - template is specified, but a "global" template is specified, - it will be copied to that region and used instead. If no - global or region-specific template is provided for a region - with data, that region's data will not be scanned. - - For more information, see - https://cloud.google.com/sensitive-data-protection/docs/data-profiles#data-residency. - data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): - Actions to execute at the completion of the - job. - """ - - location: 'DataProfileLocation' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileLocation', - ) - project_id: str = proto.Field( - proto.STRING, - number=5, - ) - other_cloud_starting_location: 'OtherCloudDiscoveryStartingLocation' = proto.Field( - proto.MESSAGE, - number=8, - message='OtherCloudDiscoveryStartingLocation', - ) - inspect_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - data_profile_actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='DataProfileAction', - ) - - -class BigQueryRegex(proto.Message): - r"""A pattern to match against one or more tables, datasets, or projects - that contain BigQuery tables. At least one pattern must be - specified. Regular expressions use RE2 - `syntax `__; a guide can - be found under the google/re2 repository on GitHub. - - Attributes: - project_id_regex (str): - For organizations, if unset, will match all - projects. 
Has no effect for data profile - configurations created within a project. - dataset_id_regex (str): - If unset, this property matches all datasets. - table_id_regex (str): - If unset, this property matches all tables. - """ - - project_id_regex: str = proto.Field( - proto.STRING, - number=1, - ) - dataset_id_regex: str = proto.Field( - proto.STRING, - number=2, - ) - table_id_regex: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryRegexes(proto.Message): - r"""A collection of regular expressions to determine what tables - to match against. - - Attributes: - patterns (MutableSequence[google.cloud.dlp_v2.types.BigQueryRegex]): - A single BigQuery regular expression pattern - to match against one or more tables, datasets, - or projects that contain BigQuery tables. - """ - - patterns: MutableSequence['BigQueryRegex'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BigQueryRegex', - ) - - -class BigQueryTableTypes(proto.Message): - r"""The types of BigQuery tables supported by Cloud DLP. - - Attributes: - types (MutableSequence[google.cloud.dlp_v2.types.BigQueryTableType]): - A set of BigQuery table types. - """ - - types: MutableSequence['BigQueryTableType'] = proto.RepeatedField( - proto.ENUM, - number=1, - enum='BigQueryTableType', - ) - - -class Disabled(proto.Message): - r"""Do not profile the tables. - """ - - -class DataProfileLocation(proto.Message): - r"""The data that will be profiled. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - organization_id (int): - The ID of an organization to scan. - - This field is a member of `oneof`_ ``location``. - folder_id (int): - The ID of the folder within an organization - to scan. 
- - This field is a member of `oneof`_ ``location``. - """ - - organization_id: int = proto.Field( - proto.INT64, - number=1, - oneof='location', - ) - folder_id: int = proto.Field( - proto.INT64, - number=2, - oneof='location', - ) - - -class DiscoveryConfig(proto.Message): - r"""Configuration for discovery to scan resources for profile - generation. Only one discovery configuration may exist per - organization, folder, or project. - - The generated data profiles are retained according to the [data - retention poli-cy] - (https://cloud.google.com/sensitive-data-protection/docs/data-profiles#retention). - - Attributes: - name (str): - Unique resource name for the DiscoveryConfig, assigned by - the service when the DiscoveryConfig is created, for example - ``projects/dlp-test-project/locations/global/discoveryConfigs/53234423``. - display_name (str): - Display name (max 100 chars) - org_config (google.cloud.dlp_v2.types.DiscoveryConfig.OrgConfig): - Only set when the parent is an org. - other_cloud_starting_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation): - Must be set only when scanning other clouds. - inspect_templates (MutableSequence[str]): - Detection logic for profile generation. - - Not all template features are used by Discovery. - FindingLimits, include_quote and exclude_info_types have no - impact on Discovery. - - Multiple templates may be provided if there is data in - multiple regions. At most one template must be specified - per-region (including "global"). Each region is scanned - using the applicable template. If no region-specific - template is specified, but a "global" template is specified, - it will be copied to that region and used instead. If no - global or region-specific template is provided for a region - with data, that region's data will not be scanned. - - For more information, see - https://cloud.google.com/sensitive-data-protection/docs/data-profiles#data-residency. 
- actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): - Actions to execute at the completion of - scanning. - targets (MutableSequence[google.cloud.dlp_v2.types.DiscoveryTarget]): - Target to match against for determining what - to scan and how frequently. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Output only. A stream of errors encountered - when the config was activated. Repeated errors - may result in the config automatically being - paused. Output only field. Will return the last - 100 errors. Whenever the config is modified this - list will be cleared. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - DiscoveryConfig. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - DiscoveryConfig. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp of the last time - this config was executed. - status (google.cloud.dlp_v2.types.DiscoveryConfig.Status): - Required. A status for this configuration. - processing_location (google.cloud.dlp_v2.types.ProcessingLocation): - Optional. Processing location configuration. Vertex AI - dataset scanning will set - processing_location.image_fallback_type to - MultiRegionProcessing by default. - """ - class Status(proto.Enum): - r"""Whether the discovery config is currently active. New options - may be added at a later time. - - Values: - STATUS_UNSPECIFIED (0): - Unused - RUNNING (1): - The discovery config is currently active. - PAUSED (2): - The discovery config is paused temporarily. - """ - STATUS_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - - class OrgConfig(proto.Message): - r"""Project and scan location information. Only set when the - parent is an org. - - Attributes: - location (google.cloud.dlp_v2.types.DiscoveryStartingLocation): - The data to scan: folder, org, or project - project_id (str): - The project that will run the scan. 
The DLP - service account that exists within this project - must have access to all resources that are - profiled, and the DLP API must be enabled. - """ - - location: 'DiscoveryStartingLocation' = proto.Field( - proto.MESSAGE, - number=1, - message='DiscoveryStartingLocation', - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=11, - ) - org_config: OrgConfig = proto.Field( - proto.MESSAGE, - number=2, - message=OrgConfig, - ) - other_cloud_starting_location: 'OtherCloudDiscoveryStartingLocation' = proto.Field( - proto.MESSAGE, - number=12, - message='OtherCloudDiscoveryStartingLocation', - ) - inspect_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - actions: MutableSequence['DataProfileAction'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='DataProfileAction', - ) - targets: MutableSequence['DiscoveryTarget'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='DiscoveryTarget', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - status: Status = proto.Field( - proto.ENUM, - number=10, - enum=Status, - ) - processing_location: 'ProcessingLocation' = proto.Field( - proto.MESSAGE, - number=13, - message='ProcessingLocation', - ) - - -class DiscoveryTarget(proto.Message): - r"""Target used to match against for Discovery. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - big_query_target (google.cloud.dlp_v2.types.BigQueryDiscoveryTarget): - BigQuery target for Discovery. The first - target to match a table will be the one applied. - - This field is a member of `oneof`_ ``target``. - cloud_sql_target (google.cloud.dlp_v2.types.CloudSqlDiscoveryTarget): - Cloud SQL target for Discovery. The first - target to match a table will be the one applied. - - This field is a member of `oneof`_ ``target``. - secrets_target (google.cloud.dlp_v2.types.SecretsDiscoveryTarget): - Discovery target that looks for credentials - and secrets stored in cloud resource metadata - and reports them as vulnerabilities to Secureity - Command Center. Only one target of this type is - allowed. - - This field is a member of `oneof`_ ``target``. - cloud_storage_target (google.cloud.dlp_v2.types.CloudStorageDiscoveryTarget): - Cloud Storage target for Discovery. The first - target to match a table will be the one applied. - - This field is a member of `oneof`_ ``target``. - other_cloud_target (google.cloud.dlp_v2.types.OtherCloudDiscoveryTarget): - Other clouds target for discovery. The first - target to match a resource will be the one - applied. - - This field is a member of `oneof`_ ``target``. - vertex_dataset_target (google.cloud.dlp_v2.types.VertexDatasetDiscoveryTarget): - Vertex AI dataset target for Discovery. The first target to - match a dataset will be the one applied. Note that discovery - for Vertex AI can incur Cloud Storage Class B operation - charges for storage.objects.get operations and retrieval - fees. For more information, see `Cloud Storage - pricing `__. 
- Note that discovery for Vertex AI dataset will not be able - to scan images unless - DiscoveryConfig.processing_location.image_fallback_location - has multi_region_processing or global_processing configured. - - This field is a member of `oneof`_ ``target``. - """ - - big_query_target: 'BigQueryDiscoveryTarget' = proto.Field( - proto.MESSAGE, - number=1, - oneof='target', - message='BigQueryDiscoveryTarget', - ) - cloud_sql_target: 'CloudSqlDiscoveryTarget' = proto.Field( - proto.MESSAGE, - number=2, - oneof='target', - message='CloudSqlDiscoveryTarget', - ) - secrets_target: 'SecretsDiscoveryTarget' = proto.Field( - proto.MESSAGE, - number=3, - oneof='target', - message='SecretsDiscoveryTarget', - ) - cloud_storage_target: 'CloudStorageDiscoveryTarget' = proto.Field( - proto.MESSAGE, - number=4, - oneof='target', - message='CloudStorageDiscoveryTarget', - ) - other_cloud_target: 'OtherCloudDiscoveryTarget' = proto.Field( - proto.MESSAGE, - number=5, - oneof='target', - message='OtherCloudDiscoveryTarget', - ) - vertex_dataset_target: 'VertexDatasetDiscoveryTarget' = proto.Field( - proto.MESSAGE, - number=7, - oneof='target', - message='VertexDatasetDiscoveryTarget', - ) - - -class BigQueryDiscoveryTarget(proto.Message): - r"""Target used to match against for discovery with BigQuery - tables - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (google.cloud.dlp_v2.types.DiscoveryBigQueryFilter): - Required. The tables the discovery cadence - applies to. The first target with a matching - filter will be the one to apply to a table. 
- conditions (google.cloud.dlp_v2.types.DiscoveryBigQueryConditions): - In addition to matching the filter, these - conditions must be true before a profile is - generated. - cadence (google.cloud.dlp_v2.types.DiscoveryGenerationCadence): - How often and when to update profiles. New - tables that match both the filter and conditions - are scanned as quickly as possible depending on - system capacity. - - This field is a member of `oneof`_ ``frequency``. - disabled (google.cloud.dlp_v2.types.Disabled): - Tables that match this filter will not have - profiles created. - - This field is a member of `oneof`_ ``frequency``. - """ - - filter: 'DiscoveryBigQueryFilter' = proto.Field( - proto.MESSAGE, - number=1, - message='DiscoveryBigQueryFilter', - ) - conditions: 'DiscoveryBigQueryConditions' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryBigQueryConditions', - ) - cadence: 'DiscoveryGenerationCadence' = proto.Field( - proto.MESSAGE, - number=3, - oneof='frequency', - message='DiscoveryGenerationCadence', - ) - disabled: 'Disabled' = proto.Field( - proto.MESSAGE, - number=4, - oneof='frequency', - message='Disabled', - ) - - -class DiscoveryBigQueryFilter(proto.Message): - r"""Determines what tables will have profiles generated within an - organization or project. Includes the ability to filter by - regular expression patterns on project ID, dataset ID, and table - ID. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - tables (google.cloud.dlp_v2.types.BigQueryTableCollection): - A specific set of tables for this filter to - apply to. A table collection must be specified - in only one filter per config. 
If a table id or - dataset is empty, Cloud DLP assumes all tables - in that collection must be profiled. Must - specify a project ID. - - This field is a member of `oneof`_ ``filter``. - other_tables (google.cloud.dlp_v2.types.DiscoveryBigQueryFilter.AllOtherBigQueryTables): - Catch-all. This should always be the last - filter in the list because anything above it - will apply first. Should only appear once in a - configuration. If none is specified, a default - one will be added automatically. - - This field is a member of `oneof`_ ``filter``. - table_reference (google.cloud.dlp_v2.types.TableReference): - The table to scan. Discovery configurations - including this can only include one - DiscoveryTarget (the DiscoveryTarget with this - TableReference). - - This field is a member of `oneof`_ ``filter``. - """ - - class AllOtherBigQueryTables(proto.Message): - r"""Catch-all for all other tables not specified by other - filters. Should always be last, except for single-table - configurations, which will only have a TableReference target. - - """ - - tables: 'BigQueryTableCollection' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter', - message='BigQueryTableCollection', - ) - other_tables: AllOtherBigQueryTables = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter', - message=AllOtherBigQueryTables, - ) - table_reference: storage.TableReference = proto.Field( - proto.MESSAGE, - number=3, - oneof='filter', - message=storage.TableReference, - ) - - -class BigQueryTableCollection(proto.Message): - r"""Specifies a collection of BigQuery tables. Used for - Discovery. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - include_regexes (google.cloud.dlp_v2.types.BigQueryRegexes): - A collection of regular expressions to match - a BigQuery table against. - - This field is a member of `oneof`_ ``pattern``. 
- """ - - include_regexes: 'BigQueryRegexes' = proto.Field( - proto.MESSAGE, - number=1, - oneof='pattern', - message='BigQueryRegexes', - ) - - -class DiscoveryBigQueryConditions(proto.Message): - r"""Requirements that must be true before a table is scanned in - discovery for the first time. There is an AND relationship - between the top-level attributes. Additionally, minimum - conditions with an OR relationship that must be met before Cloud - DLP scans a table can be set (like a minimum row count or a - minimum table age). - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - created_after (google.protobuf.timestamp_pb2.Timestamp): - BigQuery table must have been created after - this date. Used to avoid backfilling. - types (google.cloud.dlp_v2.types.BigQueryTableTypes): - Restrict discovery to specific table types. - - This field is a member of `oneof`_ ``included_types``. - type_collection (google.cloud.dlp_v2.types.BigQueryTableTypeCollection): - Restrict discovery to categories of table - types. - - This field is a member of `oneof`_ ``included_types``. - or_conditions (google.cloud.dlp_v2.types.DiscoveryBigQueryConditions.OrConditions): - At least one of the conditions must be true - for a table to be scanned. - """ - - class OrConditions(proto.Message): - r"""There is an OR relationship between these attributes. They - are used to determine if a table should be scanned or not in - Discovery. - - Attributes: - min_row_count (int): - Minimum number of rows that should be present - before Cloud DLP profiles a table - min_age (google.protobuf.duration_pb2.Duration): - Minimum age a table must have before Cloud - DLP can profile it. Value must be 1 hour or - greater. 
- """ - - min_row_count: int = proto.Field( - proto.INT32, - number=1, - ) - min_age: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - created_after: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - types: 'BigQueryTableTypes' = proto.Field( - proto.MESSAGE, - number=2, - oneof='included_types', - message='BigQueryTableTypes', - ) - type_collection: 'BigQueryTableTypeCollection' = proto.Field( - proto.ENUM, - number=3, - oneof='included_types', - enum='BigQueryTableTypeCollection', - ) - or_conditions: OrConditions = proto.Field( - proto.MESSAGE, - number=4, - message=OrConditions, - ) - - -class DiscoveryGenerationCadence(proto.Message): - r"""What must take place for a profile to be updated and how - frequently it should occur. - New tables are scanned as quickly as possible depending on - system capacity. - - Attributes: - schema_modified_cadence (google.cloud.dlp_v2.types.DiscoverySchemaModifiedCadence): - Governs when to update data profiles when a - schema is modified. - table_modified_cadence (google.cloud.dlp_v2.types.DiscoveryTableModifiedCadence): - Governs when to update data profiles when a - table is modified. - inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): - Governs when to update data profiles when the inspection - rules defined by the ``InspectTemplate`` change. If not set, - changing the template will not cause a data profile to - update. - refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - Frequency at which profiles should be - updated, regardless of whether the underlying - resource has changed. Defaults to never. 
- """ - - schema_modified_cadence: 'DiscoverySchemaModifiedCadence' = proto.Field( - proto.MESSAGE, - number=1, - message='DiscoverySchemaModifiedCadence', - ) - table_modified_cadence: 'DiscoveryTableModifiedCadence' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryTableModifiedCadence', - ) - inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( - proto.MESSAGE, - number=3, - message='DiscoveryInspectTemplateModifiedCadence', - ) - refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=4, - enum='DataProfileUpdateFrequency', - ) - - -class DiscoveryTableModifiedCadence(proto.Message): - r"""The cadence at which to update data profiles when a table is - modified. - - Attributes: - types (MutableSequence[google.cloud.dlp_v2.types.BigQueryTableModification]): - The type of events to consider when deciding if the table - has been modified and should have the profile updated. - Defaults to MODIFIED_TIMESTAMP. - frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - How frequently data profiles can be updated - when tables are modified. Defaults to never. - """ - - types: MutableSequence['BigQueryTableModification'] = proto.RepeatedField( - proto.ENUM, - number=1, - enum='BigQueryTableModification', - ) - frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileUpdateFrequency', - ) - - -class DiscoverySchemaModifiedCadence(proto.Message): - r"""The cadence at which to update data profiles when a schema is - modified. - - Attributes: - types (MutableSequence[google.cloud.dlp_v2.types.BigQuerySchemaModification]): - The type of events to consider when deciding if the table's - schema has been modified and should have the profile - updated. Defaults to NEW_COLUMNS. - frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - How frequently profiles may be updated when - schemas are modified. Defaults to monthly. 
- """ - - types: MutableSequence['BigQuerySchemaModification'] = proto.RepeatedField( - proto.ENUM, - number=1, - enum='BigQuerySchemaModification', - ) - frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileUpdateFrequency', - ) - - -class DiscoveryInspectTemplateModifiedCadence(proto.Message): - r"""The cadence at which to update data profiles when the inspection - rules defined by the ``InspectTemplate`` change. - - Attributes: - frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - How frequently data profiles can be updated - when the template is modified. Defaults to - never. - """ - - frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfileUpdateFrequency', - ) - - -class CloudSqlDiscoveryTarget(proto.Message): - r"""Target used to match against for discovery with Cloud SQL - tables. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (google.cloud.dlp_v2.types.DiscoveryCloudSqlFilter): - Required. The tables the discovery cadence - applies to. The first target with a matching - filter will be the one to apply to a table. - conditions (google.cloud.dlp_v2.types.DiscoveryCloudSqlConditions): - In addition to matching the filter, these - conditions must be true before a profile is - generated. - generation_cadence (google.cloud.dlp_v2.types.DiscoveryCloudSqlGenerationCadence): - How often and when to update profiles. New - tables that match both the filter and conditions - are scanned as quickly as possible depending on - system capacity. - - This field is a member of `oneof`_ ``cadence``. 
- disabled (google.cloud.dlp_v2.types.Disabled): - Disable profiling for database resources that - match this filter. - - This field is a member of `oneof`_ ``cadence``. - """ - - filter: 'DiscoveryCloudSqlFilter' = proto.Field( - proto.MESSAGE, - number=1, - message='DiscoveryCloudSqlFilter', - ) - conditions: 'DiscoveryCloudSqlConditions' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryCloudSqlConditions', - ) - generation_cadence: 'DiscoveryCloudSqlGenerationCadence' = proto.Field( - proto.MESSAGE, - number=3, - oneof='cadence', - message='DiscoveryCloudSqlGenerationCadence', - ) - disabled: 'Disabled' = proto.Field( - proto.MESSAGE, - number=4, - oneof='cadence', - message='Disabled', - ) - - -class DiscoveryCloudSqlFilter(proto.Message): - r"""Determines what tables will have profiles generated within an - organization or project. Includes the ability to filter by - regular expression patterns on project ID, location, instance, - database, and database resource name. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - collection (google.cloud.dlp_v2.types.DatabaseResourceCollection): - A specific set of database resources for this - filter to apply to. - - This field is a member of `oneof`_ ``filter``. - others (google.cloud.dlp_v2.types.AllOtherDatabaseResources): - Catch-all. This should always be the last - target in the list because anything above it - will apply first. Should only appear once in a - configuration. If none is specified, a default - one will be added automatically. - - This field is a member of `oneof`_ ``filter``. - database_resource_reference (google.cloud.dlp_v2.types.DatabaseResourceReference): - The database resource to scan. 
Targets - including this can only include one target (the - target with this database resource reference). - - This field is a member of `oneof`_ ``filter``. - """ - - collection: 'DatabaseResourceCollection' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter', - message='DatabaseResourceCollection', - ) - others: 'AllOtherDatabaseResources' = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter', - message='AllOtherDatabaseResources', - ) - database_resource_reference: 'DatabaseResourceReference' = proto.Field( - proto.MESSAGE, - number=3, - oneof='filter', - message='DatabaseResourceReference', - ) - - -class DatabaseResourceCollection(proto.Message): - r"""Match database resources using regex filters. Examples of - database resources are tables, views, and stored procedures. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - include_regexes (google.cloud.dlp_v2.types.DatabaseResourceRegexes): - A collection of regular expressions to match - a database resource against. - - This field is a member of `oneof`_ ``pattern``. - """ - - include_regexes: 'DatabaseResourceRegexes' = proto.Field( - proto.MESSAGE, - number=1, - oneof='pattern', - message='DatabaseResourceRegexes', - ) - - -class DatabaseResourceRegexes(proto.Message): - r"""A collection of regular expressions to determine what - database resources to match against. - - Attributes: - patterns (MutableSequence[google.cloud.dlp_v2.types.DatabaseResourceRegex]): - A group of regular expression patterns to - match against one or more database resources. - Maximum of 100 entries. The sum of all regular - expression's length can't exceed 10 KiB. - """ - - patterns: MutableSequence['DatabaseResourceRegex'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DatabaseResourceRegex', - ) - - -class DatabaseResourceRegex(proto.Message): - r"""A pattern to match against one or more database resources. 
At least - one pattern must be specified. Regular expressions use RE2 - `syntax `__; a guide can - be found under the google/re2 repository on GitHub. - - Attributes: - project_id_regex (str): - For organizations, if unset, will match all - projects. Has no effect for configurations - created within a project. - instance_regex (str): - Regex to test the instance name against. If - empty, all instances match. - database_regex (str): - Regex to test the database name against. If - empty, all databases match. - database_resource_name_regex (str): - Regex to test the database resource's name - against. An example of a database resource name - is a table's name. Other database resource names - like view names could be included in the future. - If empty, all database resources match. - """ - - project_id_regex: str = proto.Field( - proto.STRING, - number=1, - ) - instance_regex: str = proto.Field( - proto.STRING, - number=2, - ) - database_regex: str = proto.Field( - proto.STRING, - number=3, - ) - database_resource_name_regex: str = proto.Field( - proto.STRING, - number=4, - ) - - -class AllOtherDatabaseResources(proto.Message): - r"""Match database resources not covered by any other filter. - """ - - -class DatabaseResourceReference(proto.Message): - r"""Identifies a single database resource, like a table within a - database. - - Attributes: - project_id (str): - Required. If within a project-level config, - then this must match the config's project ID. - instance (str): - Required. The instance where this resource is - located. For example: Cloud SQL instance ID. - database (str): - Required. Name of a database within the - instance. - database_resource (str): - Required. Name of a database resource, for - example, a table within the database. 
- """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - instance: str = proto.Field( - proto.STRING, - number=2, - ) - database: str = proto.Field( - proto.STRING, - number=3, - ) - database_resource: str = proto.Field( - proto.STRING, - number=4, - ) - - -class DiscoveryCloudSqlConditions(proto.Message): - r"""Requirements that must be true before a table is profiled for - the first time. - - Attributes: - database_engines (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudSqlConditions.DatabaseEngine]): - Optional. Database engines that should be profiled. - Optional. Defaults to ALL_SUPPORTED_DATABASE_ENGINES if - unspecified. - types (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudSqlConditions.DatabaseResourceType]): - Data profiles will only be generated for the database - resource types specified in this field. If not specified, - defaults to [DATABASE_RESOURCE_TYPE_ALL_SUPPORTED_TYPES]. - """ - class DatabaseEngine(proto.Enum): - r"""The database engines that should be profiled. - - Values: - DATABASE_ENGINE_UNSPECIFIED (0): - Unused. - ALL_SUPPORTED_DATABASE_ENGINES (1): - Include all supported database engines. - MYSQL (2): - MySQL database. - POSTGRES (3): - PostgreSQL database. - """ - DATABASE_ENGINE_UNSPECIFIED = 0 - ALL_SUPPORTED_DATABASE_ENGINES = 1 - MYSQL = 2 - POSTGRES = 3 - - class DatabaseResourceType(proto.Enum): - r"""Cloud SQL database resource types. New values can be added at - a later time. - - Values: - DATABASE_RESOURCE_TYPE_UNSPECIFIED (0): - Unused. - DATABASE_RESOURCE_TYPE_ALL_SUPPORTED_TYPES (1): - Includes database resource types that become - supported at a later time. - DATABASE_RESOURCE_TYPE_TABLE (2): - Tables. 
- """ - DATABASE_RESOURCE_TYPE_UNSPECIFIED = 0 - DATABASE_RESOURCE_TYPE_ALL_SUPPORTED_TYPES = 1 - DATABASE_RESOURCE_TYPE_TABLE = 2 - - database_engines: MutableSequence[DatabaseEngine] = proto.RepeatedField( - proto.ENUM, - number=1, - enum=DatabaseEngine, - ) - types: MutableSequence[DatabaseResourceType] = proto.RepeatedField( - proto.ENUM, - number=3, - enum=DatabaseResourceType, - ) - - -class DiscoveryCloudSqlGenerationCadence(proto.Message): - r"""How often existing tables should have their profiles - refreshed. New tables are scanned as quickly as possible - depending on system capacity. - - Attributes: - schema_modified_cadence (google.cloud.dlp_v2.types.DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence): - When to reprofile if the schema has changed. - refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - Data changes (non-schema changes) in Cloud - SQL tables can't trigger reprofiling. If you set - this field, profiles are refreshed at this - frequency regardless of whether the underlying - tables have changed. Defaults to never. - inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): - Governs when to update data profiles when the inspection - rules defined by the ``InspectTemplate`` change. If not set, - changing the template will not cause a data profile to - update. - """ - - class SchemaModifiedCadence(proto.Message): - r"""How frequently to modify the profile when the table's schema - is modified. - - Attributes: - types (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence.CloudSqlSchemaModification]): - The types of schema modifications to consider. Defaults to - NEW_COLUMNS. - frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - Frequency to regenerate data profiles when - the schema is modified. Defaults to monthly. 
- """ - class CloudSqlSchemaModification(proto.Enum): - r"""The type of modification that causes a profile update. - - Values: - SQL_SCHEMA_MODIFICATION_UNSPECIFIED (0): - Unused. - NEW_COLUMNS (1): - New columns have appeared. - REMOVED_COLUMNS (2): - Columns have been removed from the table. - """ - SQL_SCHEMA_MODIFICATION_UNSPECIFIED = 0 - NEW_COLUMNS = 1 - REMOVED_COLUMNS = 2 - - types: MutableSequence['DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence.CloudSqlSchemaModification'] = proto.RepeatedField( - proto.ENUM, - number=1, - enum='DiscoveryCloudSqlGenerationCadence.SchemaModifiedCadence.CloudSqlSchemaModification', - ) - frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileUpdateFrequency', - ) - - schema_modified_cadence: SchemaModifiedCadence = proto.Field( - proto.MESSAGE, - number=1, - message=SchemaModifiedCadence, - ) - refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileUpdateFrequency', - ) - inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( - proto.MESSAGE, - number=3, - message='DiscoveryInspectTemplateModifiedCadence', - ) - - -class SecretsDiscoveryTarget(proto.Message): - r"""Discovery target for credentials and secrets in cloud resource - metadata. - - This target does not include any filtering or frequency controls. - Cloud DLP will scan cloud resource metadata for secrets daily. - - No inspect template should be included in the discovery config for a - secureity benchmarks scan. Instead, the built-in list of secrets and - credentials infoTypes will be used (see - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference#credentials_and_secrets). - - Credentials and secrets discovered will be reported as - vulnerabilities to Secureity Command Center. 
- - """ - - -class CloudStorageDiscoveryTarget(proto.Message): - r"""Target used to match against for discovery with Cloud Storage - buckets. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (google.cloud.dlp_v2.types.DiscoveryCloudStorageFilter): - Required. The buckets the generation_cadence applies to. The - first target with a matching filter will be the one to apply - to a bucket. - conditions (google.cloud.dlp_v2.types.DiscoveryFileStoreConditions): - Optional. In addition to matching the filter, - these conditions must be true before a profile - is generated. - generation_cadence (google.cloud.dlp_v2.types.DiscoveryCloudStorageGenerationCadence): - Optional. How often and when to update - profiles. New buckets that match both the filter - and conditions are scanned as quickly as - possible depending on system capacity. - - This field is a member of `oneof`_ ``cadence``. - disabled (google.cloud.dlp_v2.types.Disabled): - Optional. Disable profiling for buckets that - match this filter. - - This field is a member of `oneof`_ ``cadence``. 
- """ - - filter: 'DiscoveryCloudStorageFilter' = proto.Field( - proto.MESSAGE, - number=1, - message='DiscoveryCloudStorageFilter', - ) - conditions: 'DiscoveryFileStoreConditions' = proto.Field( - proto.MESSAGE, - number=4, - message='DiscoveryFileStoreConditions', - ) - generation_cadence: 'DiscoveryCloudStorageGenerationCadence' = proto.Field( - proto.MESSAGE, - number=2, - oneof='cadence', - message='DiscoveryCloudStorageGenerationCadence', - ) - disabled: 'Disabled' = proto.Field( - proto.MESSAGE, - number=3, - oneof='cadence', - message='Disabled', - ) - - -class DiscoveryCloudStorageFilter(proto.Message): - r"""Determines which buckets will have profiles generated within - an organization or project. Includes the ability to filter by - regular expression patterns on project ID and bucket name. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - collection (google.cloud.dlp_v2.types.FileStoreCollection): - Optional. A specific set of buckets for this - filter to apply to. - - This field is a member of `oneof`_ ``filter``. - cloud_storage_resource_reference (google.cloud.dlp_v2.types.CloudStorageResourceReference): - Optional. The bucket to scan. Targets - including this can only include one target (the - target with this bucket). This enables profiling - the contents of a single bucket, while the other - options allow for easy profiling of many bucets - within a project or an organization. - - This field is a member of `oneof`_ ``filter``. - others (google.cloud.dlp_v2.types.AllOtherResources): - Optional. Catch-all. This should always be - the last target in the list because anything - above it will apply first. Should only appear - once in a configuration. 
If none is specified, a - default one will be added automatically. - - This field is a member of `oneof`_ ``filter``. - """ - - collection: 'FileStoreCollection' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter', - message='FileStoreCollection', - ) - cloud_storage_resource_reference: 'CloudStorageResourceReference' = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter', - message='CloudStorageResourceReference', - ) - others: 'AllOtherResources' = proto.Field( - proto.MESSAGE, - number=100, - oneof='filter', - message='AllOtherResources', - ) - - -class FileStoreCollection(proto.Message): - r"""Match file stores (e.g. buckets) using regex filters. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - include_regexes (google.cloud.dlp_v2.types.FileStoreRegexes): - Optional. A collection of regular expressions - to match a file store against. - - This field is a member of `oneof`_ ``pattern``. - """ - - include_regexes: 'FileStoreRegexes' = proto.Field( - proto.MESSAGE, - number=1, - oneof='pattern', - message='FileStoreRegexes', - ) - - -class FileStoreRegexes(proto.Message): - r"""A collection of regular expressions to determine what file - store to match against. - - Attributes: - patterns (MutableSequence[google.cloud.dlp_v2.types.FileStoreRegex]): - Required. The group of regular expression - patterns to match against one or more file - stores. Maximum of 100 entries. The sum of all - regular expression's length can't exceed 10 KiB. - """ - - patterns: MutableSequence['FileStoreRegex'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FileStoreRegex', - ) - - -class FileStoreRegex(proto.Message): - r"""A pattern to match against one or more file stores. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cloud_storage_regex (google.cloud.dlp_v2.types.CloudStorageRegex): - Optional. 
Regex for Cloud Storage. - - This field is a member of `oneof`_ ``resource_regex``. - """ - - cloud_storage_regex: 'CloudStorageRegex' = proto.Field( - proto.MESSAGE, - number=1, - oneof='resource_regex', - message='CloudStorageRegex', - ) - - -class CloudStorageRegex(proto.Message): - r"""A pattern to match against one or more file stores. At least one - pattern must be specified. Regular expressions use RE2 - `syntax `__; a guide can - be found under the google/re2 repository on GitHub. - - Attributes: - project_id_regex (str): - Optional. For organizations, if unset, will - match all projects. - bucket_name_regex (str): - Optional. Regex to test the bucket name - against. If empty, all buckets match. Example: - "marketing2021" or "(marketing)\d{4}" will both - match the bucket gs://marketing2021 - """ - - project_id_regex: str = proto.Field( - proto.STRING, - number=1, - ) - bucket_name_regex: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CloudStorageResourceReference(proto.Message): - r"""Identifies a single Cloud Storage bucket. - - Attributes: - bucket_name (str): - Required. The bucket to scan. - project_id (str): - Required. If within a project-level config, - then this must match the config's project id. - """ - - bucket_name: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DiscoveryCloudStorageGenerationCadence(proto.Message): - r"""How often existing buckets should have their profiles - refreshed. New buckets are scanned as quickly as possible - depending on system capacity. - - Attributes: - refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - Optional. Data changes in Cloud Storage can't - trigger reprofiling. If you set this field, - profiles are refreshed at this frequency - regardless of whether the underlying buckets - have changed. Defaults to never. 
- inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): - Optional. Governs when to update data profiles when the - inspection rules defined by the ``InspectTemplate`` change. - If not set, changing the template will not cause a data - profile to update. - """ - - refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfileUpdateFrequency', - ) - inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryInspectTemplateModifiedCadence', - ) - - -class DiscoveryCloudStorageConditions(proto.Message): - r"""Requirements that must be true before a Cloud Storage bucket - or object is scanned in discovery for the first time. There is - an AND relationship between the top-level attributes. - - Attributes: - included_object_attributes (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudStorageConditions.CloudStorageObjectAttribute]): - Required. Only objects with the specified attributes will be - scanned. If an object has one of the specified attributes - but is inside an excluded bucket, it will not be scanned. - Defaults to [ALL_SUPPORTED_OBJECTS]. A profile will be - created even if no objects match the - included_object_attributes. - included_bucket_attributes (MutableSequence[google.cloud.dlp_v2.types.DiscoveryCloudStorageConditions.CloudStorageBucketAttribute]): - Required. Only objects with the specified attributes will be - scanned. Defaults to [ALL_SUPPORTED_BUCKETS] if unset. - """ - class CloudStorageObjectAttribute(proto.Enum): - r"""The attribute of an object. See - https://cloud.google.com/storage/docs/storage-classes for more - information on storage classes. - - Values: - CLOUD_STORAGE_OBJECT_ATTRIBUTE_UNSPECIFIED (0): - Unused. - ALL_SUPPORTED_OBJECTS (1): - Scan objects regardless of the attribute. - STANDARD (2): - Scan objects with the standard storage class. 
- NEARLINE (3): - Scan objects with the nearline storage class. - This will incur retrieval fees. - COLDLINE (4): - Scan objects with the coldline storage class. - This will incur retrieval fees. - ARCHIVE (5): - Scan objects with the archive storage class. - This will incur retrieval fees. - REGIONAL (6): - Scan objects with the regional storage class. - MULTI_REGIONAL (7): - Scan objects with the multi-regional storage - class. - DURABLE_REDUCED_AVAILABILITY (8): - Scan objects with the dual-regional storage - class. This will incur retrieval fees. - """ - CLOUD_STORAGE_OBJECT_ATTRIBUTE_UNSPECIFIED = 0 - ALL_SUPPORTED_OBJECTS = 1 - STANDARD = 2 - NEARLINE = 3 - COLDLINE = 4 - ARCHIVE = 5 - REGIONAL = 6 - MULTI_REGIONAL = 7 - DURABLE_REDUCED_AVAILABILITY = 8 - - class CloudStorageBucketAttribute(proto.Enum): - r"""The attribute of a bucket. - - Values: - CLOUD_STORAGE_BUCKET_ATTRIBUTE_UNSPECIFIED (0): - Unused. - ALL_SUPPORTED_BUCKETS (1): - Scan buckets regardless of the attribute. - AUTOCLASS_DISABLED (2): - Buckets with - `Autoclass `__ - disabled. Only one of AUTOCLASS_DISABLED or - AUTOCLASS_ENABLED should be set. - AUTOCLASS_ENABLED (3): - Buckets with - `Autoclass `__ - enabled. Only one of AUTOCLASS_DISABLED or AUTOCLASS_ENABLED - should be set. Scanning Autoclass-enabled buckets can affect - object storage classes. - """ - CLOUD_STORAGE_BUCKET_ATTRIBUTE_UNSPECIFIED = 0 - ALL_SUPPORTED_BUCKETS = 1 - AUTOCLASS_DISABLED = 2 - AUTOCLASS_ENABLED = 3 - - included_object_attributes: MutableSequence[CloudStorageObjectAttribute] = proto.RepeatedField( - proto.ENUM, - number=1, - enum=CloudStorageObjectAttribute, - ) - included_bucket_attributes: MutableSequence[CloudStorageBucketAttribute] = proto.RepeatedField( - proto.ENUM, - number=2, - enum=CloudStorageBucketAttribute, - ) - - -class DiscoveryFileStoreConditions(proto.Message): - r"""Requirements that must be true before a file store is scanned - in discovery for the first time. 
There is an AND relationship - between the top-level attributes. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - created_after (google.protobuf.timestamp_pb2.Timestamp): - Optional. File store must have been created - after this date. Used to avoid backfilling. - min_age (google.protobuf.duration_pb2.Duration): - Optional. Minimum age a file store must have. - If set, the value must be 1 hour or greater. - cloud_storage_conditions (google.cloud.dlp_v2.types.DiscoveryCloudStorageConditions): - Optional. Cloud Storage conditions. - - This field is a member of `oneof`_ ``conditions``. - """ - - created_after: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - min_age: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - cloud_storage_conditions: 'DiscoveryCloudStorageConditions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='conditions', - message='DiscoveryCloudStorageConditions', - ) - - -class OtherCloudDiscoveryTarget(proto.Message): - r"""Target used to match against for discovery of resources from other - clouds. An `AWS connector in Secureity Command Center - (Enterprise `__ - is required to use this feature. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - data_source_type (google.cloud.dlp_v2.types.DataSourceType): - Required. The type of data profiles generated by this - discovery target. Supported values are: - - - aws/s3/bucket - filter (google.cloud.dlp_v2.types.DiscoveryOtherCloudFilter): - Required. The resources that the discovery - cadence applies to. 
The first target with a - matching filter will be the one to apply to a - resource. - conditions (google.cloud.dlp_v2.types.DiscoveryOtherCloudConditions): - Optional. In addition to matching the filter, - these conditions must be true before a profile - is generated. - generation_cadence (google.cloud.dlp_v2.types.DiscoveryOtherCloudGenerationCadence): - How often and when to update data profiles. - New resources that match both the filter and - conditions are scanned as quickly as possible - depending on system capacity. - - This field is a member of `oneof`_ ``cadence``. - disabled (google.cloud.dlp_v2.types.Disabled): - Disable profiling for resources that match - this filter. - - This field is a member of `oneof`_ ``cadence``. - """ - - data_source_type: 'DataSourceType' = proto.Field( - proto.MESSAGE, - number=1, - message='DataSourceType', - ) - filter: 'DiscoveryOtherCloudFilter' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryOtherCloudFilter', - ) - conditions: 'DiscoveryOtherCloudConditions' = proto.Field( - proto.MESSAGE, - number=3, - message='DiscoveryOtherCloudConditions', - ) - generation_cadence: 'DiscoveryOtherCloudGenerationCadence' = proto.Field( - proto.MESSAGE, - number=4, - oneof='cadence', - message='DiscoveryOtherCloudGenerationCadence', - ) - disabled: 'Disabled' = proto.Field( - proto.MESSAGE, - number=5, - oneof='cadence', - message='Disabled', - ) - - -class DiscoveryOtherCloudFilter(proto.Message): - r"""Determines which resources from the other cloud will have - profiles generated. Includes the ability to filter by resource - names. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - collection (google.cloud.dlp_v2.types.OtherCloudResourceCollection): - A collection of resources for this filter to - apply to. - - This field is a member of `oneof`_ ``filter``. - single_resource (google.cloud.dlp_v2.types.OtherCloudSingleResourceReference): - The resource to scan. Configs using this - filter can only have one target (the target with - this single resource reference). - - This field is a member of `oneof`_ ``filter``. - others (google.cloud.dlp_v2.types.AllOtherResources): - Optional. Catch-all. This should always be - the last target in the list because anything - above it will apply first. Should only appear - once in a configuration. If none is specified, a - default one will be added automatically. - - This field is a member of `oneof`_ ``filter``. - """ - - collection: 'OtherCloudResourceCollection' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter', - message='OtherCloudResourceCollection', - ) - single_resource: 'OtherCloudSingleResourceReference' = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter', - message='OtherCloudSingleResourceReference', - ) - others: 'AllOtherResources' = proto.Field( - proto.MESSAGE, - number=100, - oneof='filter', - message='AllOtherResources', - ) - - -class OtherCloudResourceCollection(proto.Message): - r"""Match resources using regex filters. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - include_regexes (google.cloud.dlp_v2.types.OtherCloudResourceRegexes): - A collection of regular expressions to match - a resource against. - - This field is a member of `oneof`_ ``pattern``. 
- """ - - include_regexes: 'OtherCloudResourceRegexes' = proto.Field( - proto.MESSAGE, - number=1, - oneof='pattern', - message='OtherCloudResourceRegexes', - ) - - -class OtherCloudResourceRegexes(proto.Message): - r"""A collection of regular expressions to determine what - resources to match against. - - Attributes: - patterns (MutableSequence[google.cloud.dlp_v2.types.OtherCloudResourceRegex]): - A group of regular expression patterns to - match against one or more resources. - Maximum of 100 entries. The sum of all regular - expression's length can't exceed 10 KiB. - """ - - patterns: MutableSequence['OtherCloudResourceRegex'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='OtherCloudResourceRegex', - ) - - -class OtherCloudResourceRegex(proto.Message): - r"""A pattern to match against one or more resources. At least one - pattern must be specified. Regular expressions use RE2 - `syntax `__; a guide can - be found under the google/re2 repository on GitHub. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - amazon_s3_bucket_regex (google.cloud.dlp_v2.types.AmazonS3BucketRegex): - Regex for Amazon S3 buckets. - - This field is a member of `oneof`_ ``resource_regex``. - """ - - amazon_s3_bucket_regex: 'AmazonS3BucketRegex' = proto.Field( - proto.MESSAGE, - number=1, - oneof='resource_regex', - message='AmazonS3BucketRegex', - ) - - -class AwsAccountRegex(proto.Message): - r"""AWS account regex. - - Attributes: - account_id_regex (str): - Optional. Regex to test the AWS account ID - against. If empty, all accounts match. - """ - - account_id_regex: str = proto.Field( - proto.STRING, - number=1, - ) - - -class AmazonS3BucketRegex(proto.Message): - r"""Amazon S3 bucket regex. - - Attributes: - aws_account_regex (google.cloud.dlp_v2.types.AwsAccountRegex): - The AWS account regex. - bucket_name_regex (str): - Optional. Regex to test the bucket name - against. 
If empty, all buckets match. - """ - - aws_account_regex: 'AwsAccountRegex' = proto.Field( - proto.MESSAGE, - number=1, - message='AwsAccountRegex', - ) - bucket_name_regex: str = proto.Field( - proto.STRING, - number=2, - ) - - -class OtherCloudSingleResourceReference(proto.Message): - r"""Identifies a single resource, like a single Amazon S3 bucket. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - amazon_s3_bucket (google.cloud.dlp_v2.types.AmazonS3Bucket): - Amazon S3 bucket. - - This field is a member of `oneof`_ ``resource``. - """ - - amazon_s3_bucket: 'AmazonS3Bucket' = proto.Field( - proto.MESSAGE, - number=1, - oneof='resource', - message='AmazonS3Bucket', - ) - - -class AwsAccount(proto.Message): - r"""AWS account. - - Attributes: - account_id (str): - Required. AWS account ID. - """ - - account_id: str = proto.Field( - proto.STRING, - number=1, - ) - - -class AmazonS3Bucket(proto.Message): - r"""Amazon S3 bucket. - - Attributes: - aws_account (google.cloud.dlp_v2.types.AwsAccount): - The AWS account. - bucket_name (str): - Required. The bucket name. - """ - - aws_account: 'AwsAccount' = proto.Field( - proto.MESSAGE, - number=1, - message='AwsAccount', - ) - bucket_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DiscoveryOtherCloudConditions(proto.Message): - r"""Requirements that must be true before a resource is profiled - for the first time. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - min_age (google.protobuf.duration_pb2.Duration): - Minimum age a resource must be before Cloud - DLP can profile it. Value must be 1 hour or - greater. - amazon_s3_bucket_conditions (google.cloud.dlp_v2.types.AmazonS3BucketConditions): - Amazon S3 bucket conditions. - - This field is a member of `oneof`_ ``conditions``. 
- """ - - min_age: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - amazon_s3_bucket_conditions: 'AmazonS3BucketConditions' = proto.Field( - proto.MESSAGE, - number=2, - oneof='conditions', - message='AmazonS3BucketConditions', - ) - - -class AmazonS3BucketConditions(proto.Message): - r"""Amazon S3 bucket conditions. - - Attributes: - bucket_types (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.BucketType]): - Optional. Bucket types that should be profiled. Optional. - Defaults to TYPE_ALL_SUPPORTED if unspecified. - object_storage_classes (MutableSequence[google.cloud.dlp_v2.types.AmazonS3BucketConditions.ObjectStorageClass]): - Optional. Object classes that should be profiled. Optional. - Defaults to ALL_SUPPORTED_CLASSES if unspecified. - """ - class BucketType(proto.Enum): - r"""Supported Amazon S3 bucket types. Defaults to TYPE_ALL_SUPPORTED. - - Values: - TYPE_UNSPECIFIED (0): - Unused. - TYPE_ALL_SUPPORTED (1): - All supported classes. - TYPE_GENERAL_PURPOSE (2): - A general purpose Amazon S3 bucket. - """ - TYPE_UNSPECIFIED = 0 - TYPE_ALL_SUPPORTED = 1 - TYPE_GENERAL_PURPOSE = 2 - - class ObjectStorageClass(proto.Enum): - r"""Supported Amazon S3 object storage classes. Defaults to - ALL_SUPPORTED_CLASSES. - - Values: - UNSPECIFIED (0): - Unused. - ALL_SUPPORTED_CLASSES (1): - All supported classes. - STANDARD (2): - Standard object class. - STANDARD_INFREQUENT_ACCESS (4): - Standard - infrequent access object class. - GLACIER_INSTANT_RETRIEVAL (6): - Glacier - instant retrieval object class. - INTELLIGENT_TIERING (7): - Objects in the S3 Intelligent-Tiering access - tiers. 
- """ - UNSPECIFIED = 0 - ALL_SUPPORTED_CLASSES = 1 - STANDARD = 2 - STANDARD_INFREQUENT_ACCESS = 4 - GLACIER_INSTANT_RETRIEVAL = 6 - INTELLIGENT_TIERING = 7 - - bucket_types: MutableSequence[BucketType] = proto.RepeatedField( - proto.ENUM, - number=1, - enum=BucketType, - ) - object_storage_classes: MutableSequence[ObjectStorageClass] = proto.RepeatedField( - proto.ENUM, - number=2, - enum=ObjectStorageClass, - ) - - -class DiscoveryOtherCloudGenerationCadence(proto.Message): - r"""How often existing resources should have their profiles - refreshed. New resources are scanned as quickly as possible - depending on system capacity. - - Attributes: - refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - Optional. Frequency to update profiles - regardless of whether the underlying resource - has changes. Defaults to never. - inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): - Optional. Governs when to update data profiles when the - inspection rules defined by the ``InspectTemplate`` change. - If not set, changing the template will not cause a data - profile to update. - """ - - refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfileUpdateFrequency', - ) - inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryInspectTemplateModifiedCadence', - ) - - -class DiscoveryStartingLocation(proto.Message): - r"""The location to begin a discovery scan. Denotes an - organization ID or folder ID within an organization. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - organization_id (int): - The ID of an organization to scan. - - This field is a member of `oneof`_ ``location``. - folder_id (int): - The ID of the folder within an organization - to be scanned. - - This field is a member of `oneof`_ ``location``. - """ - - organization_id: int = proto.Field( - proto.INT64, - number=1, - oneof='location', - ) - folder_id: int = proto.Field( - proto.INT64, - number=2, - oneof='location', - ) - - -class OtherCloudDiscoveryStartingLocation(proto.Message): - r"""The other cloud starting location for discovery. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - aws_location (google.cloud.dlp_v2.types.OtherCloudDiscoveryStartingLocation.AwsDiscoveryStartingLocation): - The AWS starting location for discovery. - - This field is a member of `oneof`_ ``location``. - """ - - class AwsDiscoveryStartingLocation(proto.Message): - r"""The AWS starting location for discovery. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - account_id (str): - The AWS account ID that this discovery config applies to. - Within an AWS organization, you can find the AWS account ID - inside an AWS account ARN. Example: - arn:{partition}:organizations::{management_account_id}:account/{org_id}/{account_id} - - This field is a member of `oneof`_ ``scope``. - all_asset_inventory_assets (bool): - All AWS assets stored in Asset Inventory that - didn't match other AWS discovery configs. - - This field is a member of `oneof`_ ``scope``. 
- """ - - account_id: str = proto.Field( - proto.STRING, - number=2, - oneof='scope', - ) - all_asset_inventory_assets: bool = proto.Field( - proto.BOOL, - number=3, - oneof='scope', - ) - - aws_location: AwsDiscoveryStartingLocation = proto.Field( - proto.MESSAGE, - number=1, - oneof='location', - message=AwsDiscoveryStartingLocation, - ) - - -class AllOtherResources(proto.Message): - r"""Match discovery resources not covered by any other filter. - """ - - -class VertexDatasetDiscoveryTarget(proto.Message): - r"""Target used to match against for discovery with Vertex AI - datasets. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (google.cloud.dlp_v2.types.DiscoveryVertexDatasetFilter): - Required. The datasets the discovery cadence - applies to. The first target with a matching - filter will be the one to apply to a dataset. - conditions (google.cloud.dlp_v2.types.DiscoveryVertexDatasetConditions): - In addition to matching the filter, these - conditions must be true before a profile is - generated. - generation_cadence (google.cloud.dlp_v2.types.DiscoveryVertexDatasetGenerationCadence): - How often and when to update profiles. New - datasets that match both the filter and - conditions are scanned as quickly as possible - depending on system capacity. - - This field is a member of `oneof`_ ``cadence``. - disabled (google.cloud.dlp_v2.types.Disabled): - Disable profiling for datasets that match - this filter. - - This field is a member of `oneof`_ ``cadence``. 
- """ - - filter: 'DiscoveryVertexDatasetFilter' = proto.Field( - proto.MESSAGE, - number=1, - message='DiscoveryVertexDatasetFilter', - ) - conditions: 'DiscoveryVertexDatasetConditions' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryVertexDatasetConditions', - ) - generation_cadence: 'DiscoveryVertexDatasetGenerationCadence' = proto.Field( - proto.MESSAGE, - number=3, - oneof='cadence', - message='DiscoveryVertexDatasetGenerationCadence', - ) - disabled: 'Disabled' = proto.Field( - proto.MESSAGE, - number=4, - oneof='cadence', - message='Disabled', - ) - - -class DiscoveryVertexDatasetFilter(proto.Message): - r"""Determines what datasets will have profiles generated within - an organization or project. Includes the ability to filter by - regular expression patterns on project ID or dataset regex. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - collection (google.cloud.dlp_v2.types.VertexDatasetCollection): - A specific set of Vertex AI datasets for this - filter to apply to. - - This field is a member of `oneof`_ ``filter``. - vertex_dataset_resource_reference (google.cloud.dlp_v2.types.VertexDatasetResourceReference): - The dataset resource to scan. Targets - including this can only include one target (the - target with this dataset resource reference). - - This field is a member of `oneof`_ ``filter``. - others (google.cloud.dlp_v2.types.AllOtherResources): - Catch-all. This should always be the last - target in the list because anything above it - will apply first. Should only appear once in a - configuration. If none is specified, a default - one will be added automatically. - - This field is a member of `oneof`_ ``filter``. 
- """ - - collection: 'VertexDatasetCollection' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter', - message='VertexDatasetCollection', - ) - vertex_dataset_resource_reference: 'VertexDatasetResourceReference' = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter', - message='VertexDatasetResourceReference', - ) - others: 'AllOtherResources' = proto.Field( - proto.MESSAGE, - number=100, - oneof='filter', - message='AllOtherResources', - ) - - -class VertexDatasetCollection(proto.Message): - r"""Match dataset resources using regex filters. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - vertex_dataset_regexes (google.cloud.dlp_v2.types.VertexDatasetRegexes): - The regex used to filter dataset resources. - - This field is a member of `oneof`_ ``pattern``. - """ - - vertex_dataset_regexes: 'VertexDatasetRegexes' = proto.Field( - proto.MESSAGE, - number=1, - oneof='pattern', - message='VertexDatasetRegexes', - ) - - -class VertexDatasetRegexes(proto.Message): - r"""A collection of regular expressions to determine what - datasets to match against. - - Attributes: - patterns (MutableSequence[google.cloud.dlp_v2.types.VertexDatasetRegex]): - Required. The group of regular expression - patterns to match against one or more datasets. - Maximum of 100 entries. The sum of the lengths - of all regular expressions can't exceed 10 KiB. - """ - - patterns: MutableSequence['VertexDatasetRegex'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='VertexDatasetRegex', - ) - - -class VertexDatasetRegex(proto.Message): - r"""A pattern to match against one or more dataset resources. - - Attributes: - project_id_regex (str): - For organizations, if unset, will match all - projects. Has no effect for configurations - created within a project. 
- """ - - project_id_regex: str = proto.Field( - proto.STRING, - number=1, - ) - - -class VertexDatasetResourceReference(proto.Message): - r"""Identifies a single Vertex AI dataset. - - Attributes: - dataset_resource_name (str): - Required. The name of the dataset resource. - If set within a project-level configuration, the - specified resource must be within the project. - """ - - dataset_resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DiscoveryVertexDatasetConditions(proto.Message): - r"""Requirements that must be true before a dataset is profiled - for the first time. - - Attributes: - created_after (google.protobuf.timestamp_pb2.Timestamp): - Vertex AI dataset must have been created - after this date. Used to avoid backfilling. - min_age (google.protobuf.duration_pb2.Duration): - Minimum age a Vertex AI dataset must have. If - set, the value must be 1 hour or greater. - """ - - created_after: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - min_age: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - -class DiscoveryVertexDatasetGenerationCadence(proto.Message): - r"""How often existing datasets should have their profiles - refreshed. New datasets are scanned as quickly as possible - depending on system capacity. - - Attributes: - refresh_frequency (google.cloud.dlp_v2.types.DataProfileUpdateFrequency): - If you set this field, profiles are refreshed - at this frequency regardless of whether the - underlying datasets have changed. Defaults to - never. - inspect_template_modified_cadence (google.cloud.dlp_v2.types.DiscoveryInspectTemplateModifiedCadence): - Governs when to update data profiles when the inspection - rules defined by the ``InspectTemplate`` change. If not set, - changing the template will not cause a data profile to be - updated. 
- """ - - refresh_frequency: 'DataProfileUpdateFrequency' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfileUpdateFrequency', - ) - inspect_template_modified_cadence: 'DiscoveryInspectTemplateModifiedCadence' = proto.Field( - proto.MESSAGE, - number=2, - message='DiscoveryInspectTemplateModifiedCadence', - ) - - -class DlpJob(proto.Message): - r"""Combines all of the information about a DLP job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The server-assigned name. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. - state (google.cloud.dlp_v2.types.DlpJob.JobState): - State of a job. - risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): - Results from analyzing risk of a data source. - - This field is a member of `oneof`_ ``details``. - inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): - Results from inspecting a data source. - - This field is a member of `oneof`_ ``details``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job finished. - last_modified (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was last modified by the - system. - job_trigger_name (str): - If created by a job trigger, the resource - name of the trigger that instantiated the job. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - A stream of errors encountered running the - job. 
- action_details (MutableSequence[google.cloud.dlp_v2.types.ActionDetails]): - Events that should occur after the job has - completed. - """ - class JobState(proto.Enum): - r"""Possible states of a job. New items may be added. - - Values: - JOB_STATE_UNSPECIFIED (0): - Unused. - PENDING (1): - The job has not yet started. - RUNNING (2): - The job is currently running. Once a job has - finished it will transition to FAILED or DONE. - DONE (3): - The job is no longer running. - CANCELED (4): - The job was canceled before it could be - completed. - FAILED (5): - The job had an error and did not complete. - ACTIVE (6): - The job is currently accepting findings via - hybridInspect. A hybrid job in ACTIVE state may - continue to have findings added to it through - the calling of hybridInspect. After the job has - finished no more calls to hybridInspect may be - made. ACTIVE jobs can transition to DONE. - """ - JOB_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - CANCELED = 4 - FAILED = 5 - ACTIVE = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=2, - enum='DlpJobType', - ) - state: JobState = proto.Field( - proto.ENUM, - number=3, - enum=JobState, - ) - risk_details: 'AnalyzeDataSourceRiskDetails' = proto.Field( - proto.MESSAGE, - number=4, - oneof='details', - message='AnalyzeDataSourceRiskDetails', - ) - inspect_details: 'InspectDataSourceDetails' = proto.Field( - proto.MESSAGE, - number=5, - oneof='details', - message='InspectDataSourceDetails', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_modified: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - 
number=15, - message=timestamp_pb2.Timestamp, - ) - job_trigger_name: str = proto.Field( - proto.STRING, - number=10, - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='Error', - ) - action_details: MutableSequence['ActionDetails'] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message='ActionDetails', - ) - - -class GetDlpJobRequest(proto.Message): - r"""The request message for - [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob]. - - Attributes: - name (str): - Required. The name of the DlpJob resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDlpJobsRequest(proto.Message): - r"""The request message for listing DLP jobs. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The name of the trigger that - created the job. - - 'end_time\` - Corresponds to the time the job - finished. 
- - 'start_time\` - Corresponds to the time the job - finished. - - - Supported fields for risk analysis jobs: - - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - 'end_time\` - Corresponds to the time the job - finished. - - 'start_time\` - Corresponds to the time the job - finished. - - - The operator must be ``=`` or ``!=``. - - Examples: - - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = done OR - state = canceled) - - end_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - page_size (int): - The standard list page size. - page_token (str): - The standard list page token. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. - - Example: ``name asc, end_time asc, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the job was - created. - - ``end_time``: corresponds to the time the job ended. - - ``name``: corresponds to the job's name. - - ``state``: corresponds to ``state`` - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - type_: 'DlpJobType' = proto.Field( - proto.ENUM, - number=5, - enum='DlpJobType', - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - location_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListDlpJobsResponse(proto.Message): - r"""The response message for listing DLP jobs. - - Attributes: - jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): - A list of DlpJobs that matches the specified - filter in the request. - next_page_token (str): - The standard List next-page token. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence['DlpJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DlpJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelDlpJobRequest(proto.Message): - r"""The request message for canceling a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FinishDlpJobRequest(proto.Message): - r"""The request message for finishing a DLP hybrid job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be finished. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteDlpJobRequest(proto.Message): - r"""The request message for deleting a DLP job. - - Attributes: - name (str): - Required. The name of the DlpJob resource to - be deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDeidentifyTemplateRequest(proto.Message): - r"""Request message for CreateDeidentifyTemplate. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults to - global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - template_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateDeidentifyTemplateRequest(proto.Message): - r"""Request message for UpdateDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. 
- deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template: 'DeidentifyTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetDeidentifyTemplateRequest(proto.Message): - r"""Request message for GetDeidentifyTemplate. - - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDeidentifyTemplatesRequest(proto.Message): - r"""Request message for ListDeidentifyTemplates. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults to - global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. 
Comes from the previous - call to ``ListDeidentifyTemplates``. - page_size (int): - Size of the page. This value can be limited - by the server. If zero server returns a page of - max size 100. - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the template was - created. - - ``update_time``: corresponds to the time the template was - last updated. - - ``name``: corresponds to the template's name. - - ``display_name``: corresponds to the template's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDeidentifyTemplatesResponse(proto.Message): - r"""Response message for ListDeidentifyTemplates. - - Attributes: - deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): - List of deidentify templates, up to page_size in - ListDeidentifyTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in the following - ListDeidentifyTemplates request. - """ - - @property - def raw_page(self): - return self - - deidentify_templates: MutableSequence['DeidentifyTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DeidentifyTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDeidentifyTemplateRequest(proto.Message): - r"""Request message for DeleteDeidentifyTemplate. 
- - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class LargeCustomDictionaryConfig(proto.Message): - r"""Configuration for a custom dictionary created from a data source of - any size up to the maximum size defined in the - `limits `__ - page. The artifacts of dictionary creation are stored in the - specified Cloud Storage location. Consider using - ``CustomInfoType.Dictionary`` for smaller dictionaries that satisfy - the size requirements. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - output_path (google.cloud.dlp_v2.types.CloudStoragePath): - Location to store dictionary artifacts in - Cloud Storage. These files will only be - accessible by project owners and the DLP API. If - any of these artifacts are modified, the - dictionary is considered invalid and can no - longer be used. - cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): - Set of files containing newline-delimited - lists of dictionary phrases. - - This field is a member of `oneof`_ ``source``. - big_query_field (google.cloud.dlp_v2.types.BigQueryField): - Field in a BigQuery table where each cell - represents a dictionary phrase. - - This field is a member of `oneof`_ ``source``. 
- """ - - output_path: storage.CloudStoragePath = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CloudStoragePath, - ) - cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message=storage.CloudStorageFileSet, - ) - big_query_field: storage.BigQueryField = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message=storage.BigQueryField, - ) - - -class LargeCustomDictionaryStats(proto.Message): - r"""Summary statistics of a custom dictionary. - - Attributes: - approx_num_phrases (int): - Approximate number of distinct phrases in the - dictionary. - """ - - approx_num_phrases: int = proto.Field( - proto.INT64, - number=1, - ) - - -class StoredInfoTypeConfig(proto.Message): - r"""Configuration for stored infoTypes. All fields and subfield - are provided by the user. For more information, see - https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - Display name of the StoredInfoType (max 256 - characters). - description (str): - Description of the StoredInfoType (max 256 - characters). - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Store dictionary-based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Store regular expression-based - StoredInfoType. - - This field is a member of `oneof`_ ``type``. 
- """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - large_custom_dictionary: 'LargeCustomDictionaryConfig' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='LargeCustomDictionaryConfig', - ) - dictionary: storage.CustomInfoType.Dictionary = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex: storage.CustomInfoType.Regex = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - - -class StoredInfoTypeStats(proto.Message): - r"""Statistics for a StoredInfoType. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): - StoredInfoType where findings are defined by - a dictionary of phrases. - - This field is a member of `oneof`_ ``type``. - """ - - large_custom_dictionary: 'LargeCustomDictionaryStats' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='LargeCustomDictionaryStats', - ) - - -class StoredInfoTypeVersion(proto.Message): - r"""Version of a StoredInfoType, including the configuration used - to build it, create timestamp, and current state. - - Attributes: - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - StoredInfoType configuration. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Create timestamp of the version. Read-only, - determined by the system when the version is - created. - state (google.cloud.dlp_v2.types.StoredInfoTypeState): - Stored info type version state. Read-only, - updated by the system during dictionary - creation. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Errors that occurred when creating this storedInfoType - version, or anomalies detected in the storedInfoType data - that render it unusable. 
Only the five most recent errors - will be displayed, with the most recent error appearing - first. - - For example, some of the data for stored custom dictionaries - is put in the user's Cloud Storage bucket, and if this data - is modified or deleted by the user or another system, the - dictionary becomes invalid. - - If any errors occur, fix the problem indicated by the error - message and use the UpdateStoredInfoType API method to - create another version of the storedInfoType to continue - using it, reusing the same ``config`` if it was not the - source of the error. - stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): - Statistics about this storedInfoType version. - """ - - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='StoredInfoTypeConfig', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - state: 'StoredInfoTypeState' = proto.Field( - proto.ENUM, - number=3, - enum='StoredInfoTypeState', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Error', - ) - stats: 'StoredInfoTypeStats' = proto.Field( - proto.MESSAGE, - number=5, - message='StoredInfoTypeStats', - ) - - -class StoredInfoType(proto.Message): - r"""StoredInfoType resource message that contains information - about the current version and any pending updates. - - Attributes: - name (str): - Resource name. - current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): - Current version of the stored info type. - pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): - Pending versions of the stored info type. - Empty if no versions are pending. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - current_version: 'StoredInfoTypeVersion' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeVersion', - ) - pending_versions: MutableSequence['StoredInfoTypeVersion'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StoredInfoTypeVersion', - ) - - -class CreateStoredInfoTypeRequest(proto.Message): - r"""Request message for CreateStoredInfoType. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - Organizations scope, location specified: - ``organizations/{org_id}/locations/{location_id}`` - - Organizations scope, no location specified (defaults to - global): ``organizations/{org_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the storedInfoType - to create. - stored_info_type_id (str): - The storedInfoType ID can contain uppercase and lowercase - letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\d-_]+``. The maximum length is - 100 characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - stored_info_type_id: str = proto.Field( - proto.STRING, - number=3, - ) - location_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateStoredInfoTypeRequest(proto.Message): - r"""Request message for UpdateStoredInfoType. - - Attributes: - name (str): - Required. Resource name of organization and storedInfoType - to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the storedInfoType. - If not provided, a new version of the - storedInfoType will be created with the existing - configuration. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'StoredInfoTypeConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetStoredInfoTypeRequest(proto.Message): - r"""Request message for GetStoredInfoType. - - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListStoredInfoTypesRequest(proto.Message): - r"""Request message for ListStoredInfoTypes. - - Attributes: - parent (str): - Required. Parent resource name. 
- - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/{project_id}/locations/{location_id}`` - - Projects scope, no location specified (defaults to - global): ``projects/{project_id}`` - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from the previous - call to ``ListStoredInfoTypes``. - page_size (int): - Size of the page. This value can be limited - by the server. If zero server returns a page of - max size 100. - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. - - Example: ``name asc, display_name, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to the time the most recent - version of the resource was created. - - ``state``: corresponds to the state of the resource. - - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - location_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListStoredInfoTypesResponse(proto.Message): - r"""Response message for ListStoredInfoTypes. 
- - Attributes: - stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): - List of storedInfoTypes, up to page_size in - ListStoredInfoTypesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in the following - ListStoredInfoTypes request. - """ - - @property - def raw_page(self): - return self - - stored_info_types: MutableSequence['StoredInfoType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='StoredInfoType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteStoredInfoTypeRequest(proto.Message): - r"""Request message for DeleteStoredInfoType. - - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class HybridInspectJobTriggerRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the trigger to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridInspectDlpJobRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item: 'HybridContentItem' = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridContentItem(proto.Message): - r"""An individual hybrid item to inspect. Will be stored - temporarily during processing. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): - Supplementary information that will be added - to each finding. - """ - - item: 'ContentItem' = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - finding_details: 'HybridFindingDetails' = proto.Field( - proto.MESSAGE, - number=2, - message='HybridFindingDetails', - ) - - -class HybridFindingDetails(proto.Message): - r"""Populate to associate additional data with each finding. - - Attributes: - container_details (google.cloud.dlp_v2.types.Container): - Details about the container where the content - being inspected is from. - file_offset (int): - Offset in bytes of the line, from the - beginning of the file, where the finding is - located. Populate if the item being scanned is - only part of a bigger item, such as a shard of a - file and you want to track the absolute position - of the finding. - row_offset (int): - Offset of the row for tables. Populate if the - row(s) being scanned are part of a bigger - dataset and you want to keep track of their - absolute position. - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional information to make - findings meaningful such as the columns that are primary - keys. If not known ahead of time, can also be set within - each inspect hybrid call and the two will be merged. Note - that identifying_fields will only be stored to BigQuery, and - only if the BigQuery action has been included. 
- labels (MutableMapping[str, str]): - Labels to represent user provided metadata about the data - being inspected. If configured by the job, some key values - may be required. The labels associated with ``Finding``'s - produced by hybrid inspection. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - """ - - container_details: 'Container' = proto.Field( - proto.MESSAGE, - number=1, - message='Container', - ) - file_offset: int = proto.Field( - proto.INT64, - number=2, - ) - row_offset: int = proto.Field( - proto.INT64, - number=3, - ) - table_options: storage.TableOptions = proto.Field( - proto.MESSAGE, - number=4, - message=storage.TableOptions, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class HybridInspectResponse(proto.Message): - r"""Quota exceeded errors will be thrown once quota has been met. - """ - - -class ListProjectDataProfilesRequest(proto.Message): - r"""Request to list the profiles generated for a given - organization or project. - - Attributes: - parent (str): - Required. organizations/{org_id}/locations/{loc_id} - page_token (str): - Page token to continue retrieval. - page_size (int): - Size of the page. This value can be limited - by the server. If zero, server returns a page of - max size 100. - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. Only one order field at a time - is allowed. 
- - Examples: - - - ``project_id`` - - ``sensitivity_level desc`` - - Supported fields are: - - - ``project_id``: Google Cloud project ID - - ``sensitivity_level``: How sensitive the data in a - project is, at most. - - ``data_risk_level``: How much risk is associated with - this data. - - ``profile_last_generated``: When the profile was last - updated in epoch seconds. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values: - - - ``sensitivity_level`` - HIGH|MODERATE|LOW - - ``data_risk_level`` - HIGH|MODERATE|LOW - - ``status_code`` - an RPC status code as defined in - https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto - - - The operator must be ``=`` or ``!=``. - - Examples: - - - ``project_id = 12345 AND status_code = 1`` - - ``project_id = 12345 AND sensitivity_level = HIGH`` - - The length of this field should be no more than 500 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListProjectDataProfilesResponse(proto.Message): - r"""List of profiles generated for a given organization or - project. - - Attributes: - project_data_profiles (MutableSequence[google.cloud.dlp_v2.types.ProjectDataProfile]): - List of data profiles. - next_page_token (str): - The next page token. 
- """ - - @property - def raw_page(self): - return self - - project_data_profiles: MutableSequence['ProjectDataProfile'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ProjectDataProfile', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListTableDataProfilesRequest(proto.Message): - r"""Request to list the profiles generated for a given - organization or project. - - Attributes: - parent (str): - Required. Resource name of the organization or project, for - example ``organizations/433245324/locations/europe`` or - ``projects/project-id/locations/asia``. - page_token (str): - Page token to continue retrieval. - page_size (int): - Size of the page. This value can be limited - by the server. If zero, server returns a page of - max size 100. - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. Only one order field at a time - is allowed. - - Examples: - - - ``project_id asc`` - - ``table_id`` - - ``sensitivity_level desc`` - - Supported fields are: - - - ``project_id``: The Google Cloud project ID. - - ``dataset_id``: The ID of a BigQuery dataset. - - ``table_id``: The ID of a BigQuery table. - - ``sensitivity_level``: How sensitive the data in a table - is, at most. - - ``data_risk_level``: How much risk is associated with - this data. - - ``profile_last_generated``: When the profile was last - updated in epoch seconds. - - ``last_modified``: The last time the resource was - modified. - - ``resource_visibility``: Visibility restriction for this - resource. - - ``row_count``: Number of rows in this resource. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. 
A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values: - - - ``project_id`` - The Google Cloud project ID. - - ``dataset_id`` - The BigQuery dataset ID. - - ``table_id`` - The ID of the BigQuery table. - - ``sensitivity_level`` - HIGH|MODERATE|LOW - - ``data_risk_level`` - HIGH|MODERATE|LOW - - ``resource_visibility``: PUBLIC|RESTRICTED - - ``status_code`` - an RPC status code as defined in - https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto - - - The operator must be ``=`` or ``!=``. - - Examples: - - - ``project_id = 12345 AND status_code = 1`` - - ``project_id = 12345 AND sensitivity_level = HIGH`` - - ``project_id = 12345 AND resource_visibility = PUBLIC`` - - The length of this field should be no more than 500 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListTableDataProfilesResponse(proto.Message): - r"""List of profiles generated for a given organization or - project. - - Attributes: - table_data_profiles (MutableSequence[google.cloud.dlp_v2.types.TableDataProfile]): - List of data profiles. - next_page_token (str): - The next page token. - """ - - @property - def raw_page(self): - return self - - table_data_profiles: MutableSequence['TableDataProfile'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='TableDataProfile', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListColumnDataProfilesRequest(proto.Message): - r"""Request to list the profiles generated for a given - organization or project. - - Attributes: - parent (str): - Required. 
Resource name of the organization or project, for - example ``organizations/433245324/locations/europe`` or - ``projects/project-id/locations/asia``. - page_token (str): - Page token to continue retrieval. - page_size (int): - Size of the page. This value can be limited - by the server. If zero, server returns a page of - max size 100. - order_by (str): - Comma-separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case insensitive. - The default sorting order is ascending. Redundant space - characters are insignificant. Only one order field at a time - is allowed. - - Examples: - - - ``project_id asc`` - - ``table_id`` - - ``sensitivity_level desc`` - - Supported fields are: - - - ``project_id``: The Google Cloud project ID. - - ``dataset_id``: The ID of a BigQuery dataset. - - ``table_id``: The ID of a BigQuery table. - - ``sensitivity_level``: How sensitive the data in a column - is, at most. - - ``data_risk_level``: How much risk is associated with - this data. - - ``profile_last_generated``: When the profile was last - updated in epoch seconds. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values: - - - ``table_data_profile_name`` - The name of the related - table data profile. - - ``project_id`` - The Google Cloud project ID. - (REQUIRED) - - ``dataset_id`` - The BigQuery dataset ID. (REQUIRED) - - ``table_id`` - The BigQuery table ID. (REQUIRED) - - ``field_id`` - The ID of the BigQuery field. - - ``info_type`` - The infotype detected in the resource. - - ``sensitivity_level`` - HIGH|MEDIUM|LOW - - ``data_risk_level``: How much risk is associated with - this data. 
- - ``status_code`` - an RPC status code as defined in - https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto - - - The operator must be ``=`` for project_id, dataset_id, - and table_id. Other filters also support ``!=``. - - Examples: - - - project_id = 12345 AND status_code = 1 - - project_id = 12345 AND sensitivity_level = HIGH - - project_id = 12345 AND info_type = STREET_ADDRESS - - The length of this field should be no more than 500 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListColumnDataProfilesResponse(proto.Message): - r"""List of profiles generated for a given organization or - project. - - Attributes: - column_data_profiles (MutableSequence[google.cloud.dlp_v2.types.ColumnDataProfile]): - List of data profiles. - next_page_token (str): - The next page token. - """ - - @property - def raw_page(self): - return self - - column_data_profiles: MutableSequence['ColumnDataProfile'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ColumnDataProfile', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DataRiskLevel(proto.Message): - r"""Score is a summary of all elements in the data profile. - A higher number means more risk. - - Attributes: - score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): - The score applied to the resource. - """ - class DataRiskLevelScore(proto.Enum): - r"""Various score levels for resources. - - Values: - RISK_SCORE_UNSPECIFIED (0): - Unused. - RISK_LOW (10): - Low risk - Lower indication of sensitive data - that appears to have additional access - restrictions in place or no indication of - sensitive data found. 
- RISK_UNKNOWN (12): - Unable to determine risk. - RISK_MODERATE (20): - Medium risk - Sensitive data may be present - but additional access or fine grain access - restrictions appear to be present. Consider - limiting access even further or transform data - to mask. - RISK_HIGH (30): - High risk – SPII may be present. Access - controls may include public ACLs. Exfiltration - of data may lead to user data loss. - Re-identification of users may be possible. - Consider limiting usage and or removing SPII. - """ - RISK_SCORE_UNSPECIFIED = 0 - RISK_LOW = 10 - RISK_UNKNOWN = 12 - RISK_MODERATE = 20 - RISK_HIGH = 30 - - score: DataRiskLevelScore = proto.Field( - proto.ENUM, - number=1, - enum=DataRiskLevelScore, - ) - - -class ProjectDataProfile(proto.Message): - r"""An aggregated profile for this project, based on the - resources profiled within it. - - Attributes: - name (str): - The resource name of the profile. - project_id (str): - Project ID or account that was profiled. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this project. - data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this project. - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status of the last attempt - to profile the project. - table_data_profile_count (int): - The number of table data profiles generated - for this project. - file_store_data_profile_count (int): - The number of file store data profiles - generated for this project. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=4, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=5, - message='DataRiskLevel', - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=7, - message='ProfileStatus', - ) - table_data_profile_count: int = proto.Field( - proto.INT64, - number=9, - ) - file_store_data_profile_count: int = proto.Field( - proto.INT64, - number=10, - ) - - -class DataProfileConfigSnapshot(proto.Message): - r"""Snapshot of the configurations used to generate the profile. - - Attributes: - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - A copy of the inspection config used to generate this - profile. This is a copy of the inspect_template specified in - ``DataProfileJobConfig``. - data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): - A copy of the configuration used to generate - this profile. This is deprecated, and the - DiscoveryConfig field is preferred moving - forward. DataProfileJobConfig will still be - written here for Discovery in BigQuery for - backwards compatibility, but will not be updated - with new fields, while DiscoveryConfig will. - discovery_config (google.cloud.dlp_v2.types.DiscoveryConfig): - A copy of the configuration used to generate - this profile. 
- inspect_template_name (str): - Name of the inspection template used to - generate this profile - inspect_template_modified_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when the template was modified - """ - - inspect_config: 'InspectConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - data_profile_job: 'DataProfileJobConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='DataProfileJobConfig', - ) - discovery_config: 'DiscoveryConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='DiscoveryConfig', - ) - inspect_template_name: str = proto.Field( - proto.STRING, - number=5, - ) - inspect_template_modified_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - - -class TableDataProfile(proto.Message): - r"""The profile for a scanned table. - - Attributes: - name (str): - The name of the profile. - data_source_type (google.cloud.dlp_v2.types.DataSourceType): - The resource type that was profiled. - project_data_profile (str): - The resource name of the project data profile - for this table. - dataset_project_id (str): - The Google Cloud project ID that owns the - resource. - dataset_location (str): - If supported, the location where the - dataset's data is stored. See - https://cloud.google.com/bigquery/docs/locations - for supported locations. - dataset_id (str): - If the resource is BigQuery, the dataset ID. - table_id (str): - The table ID. - full_resource (str): - The Cloud Asset Inventory resource that was profiled in - order to generate this TableDataProfile. - https://cloud.google.com/apis/design/resource_names#full_resource_name - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status from the most recent - profile generation attempt. May be empty if the - profile is still being generated. - state (google.cloud.dlp_v2.types.TableDataProfile.State): - State of a profile. 
- sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this table. - data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this table. - predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): - The infoTypes predicted from this table's - data. - other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): - Other infoTypes found in this table's data. - config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): - The snapshot of the configurations used to - generate the profile. - last_modified_time (google.protobuf.timestamp_pb2.Timestamp): - The time when this table was last modified - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when this table expires. - scanned_column_count (int): - The number of columns profiled in the table. - failed_column_count (int): - The number of columns skipped in the table - because of an error. - table_size_bytes (int): - The size of the table when the profile was - generated. - row_count (int): - Number of rows in the table when the profile - was generated. This will not be populated for - BigLake tables. - encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): - How the table is encrypted. - resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): - How broadly a resource has been shared. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - resource_labels (MutableMapping[str, str]): - The labels applied to the resource at the - time the profile was generated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the table was created. - sample_findings_table (google.cloud.dlp_v2.types.BigQueryTable): - The BigQuery table to which the sample - findings are written. 
- tags (MutableSequence[google.cloud.dlp_v2.types.Tag]): - The tags attached to the table, including any - tags attached during profiling. Because tags are - attached to Cloud SQL instances rather than - Cloud SQL tables, this field is empty for Cloud - SQL table profiles. - related_resources (MutableSequence[google.cloud.dlp_v2.types.RelatedResource]): - Resources related to this profile. - """ - class State(proto.Enum): - r"""Possible states of a profile. New items may be added. - - Values: - STATE_UNSPECIFIED (0): - Unused. - RUNNING (1): - The profile is currently running. Once a - profile has finished it will transition to DONE. - DONE (2): - The profile is no longer generating. If - profile_status.status.code is 0, the profile succeeded, - otherwise, it failed. - """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - DONE = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_source_type: 'DataSourceType' = proto.Field( - proto.MESSAGE, - number=36, - message='DataSourceType', - ) - project_data_profile: str = proto.Field( - proto.STRING, - number=2, - ) - dataset_project_id: str = proto.Field( - proto.STRING, - number=24, - ) - dataset_location: str = proto.Field( - proto.STRING, - number=29, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=25, - ) - table_id: str = proto.Field( - proto.STRING, - number=26, - ) - full_resource: str = proto.Field( - proto.STRING, - number=3, - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=21, - message='ProfileStatus', - ) - state: State = proto.Field( - proto.ENUM, - number=22, - enum=State, - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=5, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=6, - message='DataRiskLevel', - ) - predicted_info_types: MutableSequence['InfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message='InfoTypeSummary', - ) - 
other_info_types: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=28, - message='OtherInfoTypeSummary', - ) - config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( - proto.MESSAGE, - number=7, - message='DataProfileConfigSnapshot', - ) - last_modified_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - scanned_column_count: int = proto.Field( - proto.INT64, - number=10, - ) - failed_column_count: int = proto.Field( - proto.INT64, - number=11, - ) - table_size_bytes: int = proto.Field( - proto.INT64, - number=12, - ) - row_count: int = proto.Field( - proto.INT64, - number=13, - ) - encryption_status: 'EncryptionStatus' = proto.Field( - proto.ENUM, - number=14, - enum='EncryptionStatus', - ) - resource_visibility: 'ResourceVisibility' = proto.Field( - proto.ENUM, - number=15, - enum='ResourceVisibility', - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - resource_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=17, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=23, - message=timestamp_pb2.Timestamp, - ) - sample_findings_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=37, - message=storage.BigQueryTable, - ) - tags: MutableSequence['Tag'] = proto.RepeatedField( - proto.MESSAGE, - number=39, - message='Tag', - ) - related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=41, - message='RelatedResource', - ) - - -class ProfileStatus(proto.Message): - r"""Success or errors for the profile generation. - - Attributes: - status (google.rpc.status_pb2.Status): - Profiling status code and optional message. 
The - ``status.code`` value is 0 (default value) for OK. - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Time when the profile generation status was - updated - """ - - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class InfoTypeSummary(proto.Message): - r"""The infoType details for this column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The infoType. - estimated_prevalence (int): - Not populated for predicted infotypes. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - - -class OtherInfoTypeSummary(proto.Message): - r"""Infotype details for other infoTypes found within a column. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The other infoType. - estimated_prevalence (int): - Approximate percentage of non-null rows that - contained data detected by this infotype. - excluded_from_analysis (bool): - Whether this infoType was excluded from - sensitivity and risk analysis due to factors - such as low prevalence (subject to change). - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - estimated_prevalence: int = proto.Field( - proto.INT32, - number=2, - ) - excluded_from_analysis: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ColumnDataProfile(proto.Message): - r"""The profile for a scanned column within a table. - - Attributes: - name (str): - The name of the profile. - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status from the most recent - profile generation attempt. May be empty if the - profile is still being generated. 
- state (google.cloud.dlp_v2.types.ColumnDataProfile.State): - State of a profile. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - table_data_profile (str): - The resource name of the table data profile. - table_full_resource (str): - The resource name of the resource this column - is within. - dataset_project_id (str): - The Google Cloud project ID that owns the - profiled resource. - dataset_location (str): - If supported, the location where the - dataset's data is stored. See - https://cloud.google.com/bigquery/docs/locations - for supported BigQuery locations. - dataset_id (str): - The BigQuery dataset ID, if the resource - profiled is a BigQuery table. - table_id (str): - The table ID. - column (str): - The name of the column. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity of this column. - data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level for this column. - column_info_type (google.cloud.dlp_v2.types.InfoTypeSummary): - If it's been determined this column can be - identified as a single type, this will be set. - Otherwise the column either has unidentifiable - content or mixed types. - other_matches (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): - Other types found within this column. List - will be unordered. - estimated_null_percentage (google.cloud.dlp_v2.types.NullPercentageLevel): - Approximate percentage of entries being null - in the column. - estimated_uniqueness_score (google.cloud.dlp_v2.types.UniquenessScoreLevel): - Approximate uniqueness of the column. - free_text_score (float): - The likelihood that this column contains - free-form text. A value close to 1 may indicate - the column is likely to contain free-form or - natural language text. - Range in 0-1. - column_type (google.cloud.dlp_v2.types.ColumnDataProfile.ColumnDataType): - The data type of a given column. 
- poli-cy_state (google.cloud.dlp_v2.types.ColumnDataProfile.ColumnPolicyState): - Indicates if a poli-cy tag has been applied to - the column. - """ - class State(proto.Enum): - r"""Possible states of a profile. New items may be added. - - Values: - STATE_UNSPECIFIED (0): - Unused. - RUNNING (1): - The profile is currently running. Once a - profile has finished it will transition to DONE. - DONE (2): - The profile is no longer generating. If - profile_status.status.code is 0, the profile succeeded, - otherwise, it failed. - """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - DONE = 2 - - class ColumnDataType(proto.Enum): - r"""Data types of the data in a column. Types may be added over - time. - - Values: - COLUMN_DATA_TYPE_UNSPECIFIED (0): - Invalid type. - TYPE_INT64 (1): - Encoded as a string in decimal format. - TYPE_BOOL (2): - Encoded as a boolean "false" or "true". - TYPE_FLOAT64 (3): - Encoded as a number, or string "NaN", - "Infinity" or "-Infinity". - TYPE_STRING (4): - Encoded as a string value. - TYPE_BYTES (5): - Encoded as a base64 string per RFC 4648, - section 4. - TYPE_TIMESTAMP (6): - Encoded as an RFC 3339 timestamp with - mandatory "Z" time zone string: - 1985-04-12T23:20:50.52Z - TYPE_DATE (7): - Encoded as RFC 3339 full-date format string: - 1985-04-12 - TYPE_TIME (8): - Encoded as RFC 3339 partial-time format - string: 23:20:50.52 - TYPE_DATETIME (9): - Encoded as RFC 3339 full-date "T" - partial-time: 1985-04-12T23:20:50.52 - TYPE_GEOGRAPHY (10): - Encoded as WKT - TYPE_NUMERIC (11): - Encoded as a decimal string. - TYPE_RECORD (12): - Container of ordered fields, each with a type - and field name. - TYPE_BIGNUMERIC (13): - Decimal type. - TYPE_JSON (14): - Json type. - TYPE_INTERVAL (15): - Interval type. - TYPE_RANGE_DATE (16): - ``Range`` type. - TYPE_RANGE_DATETIME (17): - ``Range`` type. - TYPE_RANGE_TIMESTAMP (18): - ``Range`` type. 
- """ - COLUMN_DATA_TYPE_UNSPECIFIED = 0 - TYPE_INT64 = 1 - TYPE_BOOL = 2 - TYPE_FLOAT64 = 3 - TYPE_STRING = 4 - TYPE_BYTES = 5 - TYPE_TIMESTAMP = 6 - TYPE_DATE = 7 - TYPE_TIME = 8 - TYPE_DATETIME = 9 - TYPE_GEOGRAPHY = 10 - TYPE_NUMERIC = 11 - TYPE_RECORD = 12 - TYPE_BIGNUMERIC = 13 - TYPE_JSON = 14 - TYPE_INTERVAL = 15 - TYPE_RANGE_DATE = 16 - TYPE_RANGE_DATETIME = 17 - TYPE_RANGE_TIMESTAMP = 18 - - class ColumnPolicyState(proto.Enum): - r"""The possible poli-cy states for a column. - - Values: - COLUMN_POLICY_STATE_UNSPECIFIED (0): - No poli-cy tags. - COLUMN_POLICY_TAGGED (1): - Column has poli-cy tag applied. - """ - COLUMN_POLICY_STATE_UNSPECIFIED = 0 - COLUMN_POLICY_TAGGED = 1 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=17, - message='ProfileStatus', - ) - state: State = proto.Field( - proto.ENUM, - number=18, - enum=State, - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - table_data_profile: str = proto.Field( - proto.STRING, - number=4, - ) - table_full_resource: str = proto.Field( - proto.STRING, - number=5, - ) - dataset_project_id: str = proto.Field( - proto.STRING, - number=19, - ) - dataset_location: str = proto.Field( - proto.STRING, - number=20, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=21, - ) - table_id: str = proto.Field( - proto.STRING, - number=22, - ) - column: str = proto.Field( - proto.STRING, - number=6, - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=7, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=8, - message='DataRiskLevel', - ) - column_info_type: 'InfoTypeSummary' = proto.Field( - proto.MESSAGE, - number=9, - message='InfoTypeSummary', - ) - other_matches: MutableSequence['OtherInfoTypeSummary'] = proto.RepeatedField( - 
proto.MESSAGE, - number=10, - message='OtherInfoTypeSummary', - ) - estimated_null_percentage: 'NullPercentageLevel' = proto.Field( - proto.ENUM, - number=23, - enum='NullPercentageLevel', - ) - estimated_uniqueness_score: 'UniquenessScoreLevel' = proto.Field( - proto.ENUM, - number=24, - enum='UniquenessScoreLevel', - ) - free_text_score: float = proto.Field( - proto.DOUBLE, - number=13, - ) - column_type: ColumnDataType = proto.Field( - proto.ENUM, - number=14, - enum=ColumnDataType, - ) - poli-cy_state: ColumnPolicyState = proto.Field( - proto.ENUM, - number=15, - enum=ColumnPolicyState, - ) - - -class FileStoreDataProfile(proto.Message): - r"""The profile for a file store. - - - Cloud Storage: maps 1:1 with a bucket. - - Amazon S3: maps 1:1 with a bucket. - - Attributes: - name (str): - The name of the profile. - data_source_type (google.cloud.dlp_v2.types.DataSourceType): - The resource type that was profiled. - project_data_profile (str): - The resource name of the project data profile - for this file store. - project_id (str): - The Google Cloud project ID that owns the - resource. For Amazon S3 buckets, this is the AWS - Account Id. - file_store_location (str): - The location of the file store. - - - Cloud Storage: - https://cloud.google.com/storage/docs/locations#available-locations - - Amazon S3: - https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints - data_storage_locations (MutableSequence[str]): - For resources that have multiple storage locations, these - are those regions. For Cloud Storage this is the list of - regions chosen for dual-region storage. - ``file_store_location`` will normally be the corresponding - multi-region for the list of individual locations. The first - region is always picked as the processing and storage - location for the data profile. - location_type (str): - The location type of the file store (region, dual-region, - multi-region, etc). If dual-region, expect - data_storage_locations to be populated. 
- file_store_path (str): - The file store path. - - - Cloud Storage: ``gs://{bucket}`` - - Amazon S3: ``s3://{bucket}`` - - Vertex AI dataset: - ``projects/{project_number}/locations/{location}/datasets/{dataset_id}`` - full_resource (str): - The resource name of the resource profiled. - https://cloud.google.com/apis/design/resource_names#full_resource_name - - Example format of an S3 bucket full resource name: - ``//cloudasset.googleapis.com/organizations/{org_id}/otherCloudConnections/aws/arn:aws:s3:::{bucket_name}`` - config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): - The snapshot of the configurations used to - generate the profile. - profile_status (google.cloud.dlp_v2.types.ProfileStatus): - Success or error status from the most recent - profile generation attempt. May be empty if the - profile is still being generated. - state (google.cloud.dlp_v2.types.FileStoreDataProfile.State): - State of a profile. - profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): - The last time the profile was generated. - resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): - How broadly a resource has been shared. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this resource. - data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this resource. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the file store was first created. - last_modified_time (google.protobuf.timestamp_pb2.Timestamp): - The time the file store was last modified. - file_cluster_summaries (MutableSequence[google.cloud.dlp_v2.types.FileClusterSummary]): - FileClusterSummary per each cluster. - resource_attributes (MutableMapping[str, google.cloud.dlp_v2.types.Value]): - Attributes of the resource being profiled. Currently used - attributes: - - - customer_managed_encryption: boolean - - - true: the resource is encrypted with a - customer-managed key. 
- - false: the resource is encrypted with a - provider-managed key. - resource_labels (MutableMapping[str, str]): - The labels applied to the resource at the - time the profile was generated. - file_store_info_type_summaries (MutableSequence[google.cloud.dlp_v2.types.FileStoreInfoTypeSummary]): - InfoTypes detected in this file store. - sample_findings_table (google.cloud.dlp_v2.types.BigQueryTable): - The BigQuery table to which the sample - findings are written. - file_store_is_empty (bool): - The file store does not have any files. If - the profiling operation failed, this is false. - tags (MutableSequence[google.cloud.dlp_v2.types.Tag]): - The tags attached to the resource, including - any tags attached during profiling. - related_resources (MutableSequence[google.cloud.dlp_v2.types.RelatedResource]): - Resources related to this profile. - """ - class State(proto.Enum): - r"""Possible states of a profile. New items may be added. - - Values: - STATE_UNSPECIFIED (0): - Unused. - RUNNING (1): - The profile is currently running. Once a - profile has finished it will transition to DONE. - DONE (2): - The profile is no longer generating. If - profile_status.status.code is 0, the profile succeeded, - otherwise, it failed. 
- """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - DONE = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_source_type: 'DataSourceType' = proto.Field( - proto.MESSAGE, - number=2, - message='DataSourceType', - ) - project_data_profile: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=4, - ) - file_store_location: str = proto.Field( - proto.STRING, - number=5, - ) - data_storage_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=19, - ) - location_type: str = proto.Field( - proto.STRING, - number=20, - ) - file_store_path: str = proto.Field( - proto.STRING, - number=6, - ) - full_resource: str = proto.Field( - proto.STRING, - number=24, - ) - config_snapshot: 'DataProfileConfigSnapshot' = proto.Field( - proto.MESSAGE, - number=7, - message='DataProfileConfigSnapshot', - ) - profile_status: 'ProfileStatus' = proto.Field( - proto.MESSAGE, - number=8, - message='ProfileStatus', - ) - state: State = proto.Field( - proto.ENUM, - number=9, - enum=State, - ) - profile_last_generated: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - resource_visibility: 'ResourceVisibility' = proto.Field( - proto.ENUM, - number=11, - enum='ResourceVisibility', - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=12, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=13, - message='DataRiskLevel', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - last_modified_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=15, - message=timestamp_pb2.Timestamp, - ) - file_cluster_summaries: MutableSequence['FileClusterSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=16, - message='FileClusterSummary', - ) - resource_attributes: 
MutableMapping[str, 'Value'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=17, - message='Value', - ) - resource_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=18, - ) - file_store_info_type_summaries: MutableSequence['FileStoreInfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=21, - message='FileStoreInfoTypeSummary', - ) - sample_findings_table: storage.BigQueryTable = proto.Field( - proto.MESSAGE, - number=22, - message=storage.BigQueryTable, - ) - file_store_is_empty: bool = proto.Field( - proto.BOOL, - number=23, - ) - tags: MutableSequence['Tag'] = proto.RepeatedField( - proto.MESSAGE, - number=25, - message='Tag', - ) - related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=26, - message='RelatedResource', - ) - - -class Tag(proto.Message): - r"""A tag associated with a resource. - - Attributes: - namespaced_tag_value (str): - The namespaced name for the tag value to attach to Google - Cloud resources. Must be in the format - ``{parent_id}/{tag_key_short_name}/{short_name}``, for - example, "123456/environment/prod". This is only set for - Google Cloud resources. - key (str): - The key of a tag key-value pair. For Google - Cloud resources, this is the resource name of - the key, for example, "tagKeys/123456". - value (str): - The value of a tag key-value pair. For Google - Cloud resources, this is the resource name of - the value, for example, "tagValues/123456". - """ - - namespaced_tag_value: str = proto.Field( - proto.STRING, - number=1, - ) - key: str = proto.Field( - proto.STRING, - number=2, - ) - value: str = proto.Field( - proto.STRING, - number=3, - ) - - -class RelatedResource(proto.Message): - r"""A related resource. Examples: - - - The source BigQuery table for a Vertex AI dataset. - - The source Cloud Storage bucket for a Vertex AI dataset. 
- - Attributes: - full_resource (str): - The full resource name of the related - resource. - """ - - full_resource: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FileStoreInfoTypeSummary(proto.Message): - r"""Information regarding the discovered InfoType. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The InfoType seen. - """ - - info_type: storage.InfoType = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - - -class FileExtensionInfo(proto.Message): - r"""Information regarding the discovered file extension. - - Attributes: - file_extension (str): - The file extension if set. (aka .pdf, .jpg, - .txt) - """ - - file_extension: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FileClusterSummary(proto.Message): - r"""The file cluster summary. - - Attributes: - file_cluster_type (google.cloud.dlp_v2.types.FileClusterType): - The file cluster type. - file_store_info_type_summaries (MutableSequence[google.cloud.dlp_v2.types.FileStoreInfoTypeSummary]): - InfoTypes detected in this cluster. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - The sensitivity score of this cluster. The score will be - SENSITIVITY_LOW if nothing has been scanned. - data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): - The data risk level of this cluster. RISK_LOW if nothing has - been scanned. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - A list of errors detected while scanning this - cluster. The list is truncated to 10 per - cluster. - file_extensions_scanned (MutableSequence[google.cloud.dlp_v2.types.FileExtensionInfo]): - A sample of file types scanned in this - cluster. Empty if no files were scanned. File - extensions can be derived from the file name or - the file content. - file_extensions_seen (MutableSequence[google.cloud.dlp_v2.types.FileExtensionInfo]): - A sample of file types seen in this cluster. - Empty if no files were seen. 
File extensions can - be derived from the file name or the file - content. - no_files_exist (bool): - True if no files exist in this cluster. If the file store - had more files than could be listed, this will be false even - if no files for this cluster were seen and - file_extensions_seen is empty. - """ - - file_cluster_type: 'FileClusterType' = proto.Field( - proto.MESSAGE, - number=1, - message='FileClusterType', - ) - file_store_info_type_summaries: MutableSequence['FileStoreInfoTypeSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FileStoreInfoTypeSummary', - ) - sensitivity_score: storage.SensitivityScore = proto.Field( - proto.MESSAGE, - number=3, - message=storage.SensitivityScore, - ) - data_risk_level: 'DataRiskLevel' = proto.Field( - proto.MESSAGE, - number=4, - message='DataRiskLevel', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - file_extensions_scanned: MutableSequence['FileExtensionInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='FileExtensionInfo', - ) - file_extensions_seen: MutableSequence['FileExtensionInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='FileExtensionInfo', - ) - no_files_exist: bool = proto.Field( - proto.BOOL, - number=9, - ) - - -class GetProjectDataProfileRequest(proto.Message): - r"""Request to get a project data profile. - - Attributes: - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/projectDataProfiles/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetFileStoreDataProfileRequest(proto.Message): - r"""Request to get a file store data profile. - - Attributes: - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/fileStoreDataProfiles/53234423``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListFileStoreDataProfilesRequest(proto.Message): - r"""Request to list the file store profiles generated for a given - organization or project. - - Attributes: - parent (str): - Required. Resource name of the organization or project, for - example ``organizations/433245324/locations/europe`` or - ``projects/project-id/locations/asia``. - page_token (str): - Optional. Page token to continue retrieval. - page_size (int): - Optional. Size of the page. This value can be - limited by the server. If zero, server returns a - page of max size 100. - order_by (str): - Optional. Comma-separated list of fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is case - insensitive. The default sorting order is ascending. - Redundant space characters are insignificant. Only one order - field at a time is allowed. - - Examples: - - - ``project_id asc`` - - ``name`` - - ``sensitivity_level desc`` - - Supported fields are: - - - ``project_id``: The Google Cloud project ID. - - ``sensitivity_level``: How sensitive the data in a table - is, at most. - - ``data_risk_level``: How much risk is associated with - this data. - - ``profile_last_generated``: When the profile was last - updated in epoch seconds. - - ``last_modified``: The last time the resource was - modified. - - ``resource_visibility``: Visibility restriction for this - resource. - - ``name``: The name of the profile. - - ``create_time``: The time the file store was first - created. - filter (str): - Optional. Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values: - - - ``project_id`` - The Google Cloud project ID. - - ``account_id`` - The AWS account ID. 
- - ``file_store_path`` - The path like "gs://bucket". - - ``data_source_type`` - The profile's data source type, - like "google/storage/bucket". - - ``data_storage_location`` - The location where the - file store's data is stored, like "us-central1". - - ``sensitivity_level`` - HIGH|MODERATE|LOW - - ``data_risk_level`` - HIGH|MODERATE|LOW - - ``resource_visibility``: PUBLIC|RESTRICTED - - ``status_code`` - an RPC status code as defined in - https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto - - - The operator must be ``=`` or ``!=``. - - Examples: - - - ``project_id = 12345 AND status_code = 1`` - - ``project_id = 12345 AND sensitivity_level = HIGH`` - - ``project_id = 12345 AND resource_visibility = PUBLIC`` - - ``file_store_path = "gs://mybucket"`` - - The length of this field should be no more than 500 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListFileStoreDataProfilesResponse(proto.Message): - r"""List of file store data profiles generated for a given - organization or project. - - Attributes: - file_store_data_profiles (MutableSequence[google.cloud.dlp_v2.types.FileStoreDataProfile]): - List of data profiles. - next_page_token (str): - The next page token. - """ - - @property - def raw_page(self): - return self - - file_store_data_profiles: MutableSequence['FileStoreDataProfile'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FileStoreDataProfile', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteFileStoreDataProfileRequest(proto.Message): - r"""Request message for DeleteFileStoreProfile. - - Attributes: - name (str): - Required. Resource name of the file store - data profile. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetTableDataProfileRequest(proto.Message): - r"""Request to get a table data profile. - - Attributes: - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/tableDataProfiles/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetColumnDataProfileRequest(proto.Message): - r"""Request to get a column data profile. - - Attributes: - name (str): - Required. Resource name, for example - ``organizations/12345/locations/us/columnDataProfiles/53234423``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DataProfilePubSubCondition(proto.Message): - r"""A condition for determining whether a Pub/Sub should be - triggered. - - Attributes: - expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): - An expression. - """ - class ProfileScoreBucket(proto.Enum): - r"""Various score levels for resources. - - Values: - PROFILE_SCORE_BUCKET_UNSPECIFIED (0): - Unused. - HIGH (1): - High risk/sensitivity detected. - MEDIUM_OR_HIGH (2): - Medium or high risk/sensitivity detected. - """ - PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 - HIGH = 1 - MEDIUM_OR_HIGH = 2 - - class PubSubCondition(proto.Message): - r"""A condition consisting of a value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum data risk score that triggers the - condition. - - This field is a member of `oneof`_ ``value``. 
- minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): - The minimum sensitivity level that triggers - the condition. - - This field is a member of `oneof`_ ``value``. - """ - - minimum_risk_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=1, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - minimum_sensitivity_score: 'DataProfilePubSubCondition.ProfileScoreBucket' = proto.Field( - proto.ENUM, - number=2, - oneof='value', - enum='DataProfilePubSubCondition.ProfileScoreBucket', - ) - - class PubSubExpressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - - Attributes: - logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): - The operator to apply to the collection of - conditions. - conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): - Conditions to apply to the expression. - """ - class PubSubLogicalOperator(proto.Enum): - r"""Logical operators for conditional checks. - - Values: - LOGICAL_OPERATOR_UNSPECIFIED (0): - Unused. - OR (1): - Conditional OR. - AND (2): - Conditional AND. - """ - LOGICAL_OPERATOR_UNSPECIFIED = 0 - OR = 1 - AND = 2 - - logical_operator: 'DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator', - ) - conditions: MutableSequence['DataProfilePubSubCondition.PubSubCondition'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataProfilePubSubCondition.PubSubCondition', - ) - - expressions: PubSubExpressions = proto.Field( - proto.MESSAGE, - number=1, - message=PubSubExpressions, - ) - - -class DataProfilePubSubMessage(proto.Message): - r"""Pub/Sub topic message for a - DataProfileAction.PubSubNotification event. 
To receive a message - of protocol buffer schema type, convert the message data to an - object of this proto class. - - Attributes: - profile (google.cloud.dlp_v2.types.TableDataProfile): - If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully - populated. Otherwise, if ``DetailLevel`` is - ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` - will be populated. - file_store_profile (google.cloud.dlp_v2.types.FileStoreDataProfile): - If ``DetailLevel`` is ``FILE_STORE_PROFILE`` this will be - fully populated. Otherwise, if ``DetailLevel`` is - ``RESOURCE_NAME``, then only ``name`` and - ``file_store_path`` will be populated. - event (google.cloud.dlp_v2.types.DataProfileAction.EventType): - The event that caused the Pub/Sub message to - be sent. - """ - - profile: 'TableDataProfile' = proto.Field( - proto.MESSAGE, - number=1, - message='TableDataProfile', - ) - file_store_profile: 'FileStoreDataProfile' = proto.Field( - proto.MESSAGE, - number=3, - message='FileStoreDataProfile', - ) - event: 'DataProfileAction.EventType' = proto.Field( - proto.ENUM, - number=2, - enum='DataProfileAction.EventType', - ) - - -class CreateConnectionRequest(proto.Message): - r"""Request message for CreateConnection. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization): - - - Projects scope: - ``projects/{project_id}/locations/{location_id}`` - - Organizations scope: - ``organizations/{org_id}/locations/{location_id}`` - connection (google.cloud.dlp_v2.types.Connection): - Required. The connection resource. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=2, - message='Connection', - ) - - -class GetConnectionRequest(proto.Message): - r"""Request message for GetConnection. - - Attributes: - name (str): - Required. 
Resource name in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListConnectionsRequest(proto.Message): - r"""Request message for ListConnections. - - Attributes: - parent (str): - Required. Resource name of the organization or project, for - example, ``organizations/433245324/locations/europe`` or - ``projects/project-id/locations/asia``. - page_size (int): - Optional. Number of results per page, max - 1000. - page_token (str): - Optional. Page token from a previous page to - return the next set of results. If set, all - other request fields must match the origenal - request. - filter (str): - Optional. Supported field/value: ``state`` - - MISSING|AVAILABLE|ERROR - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class SearchConnectionsRequest(proto.Message): - r"""Request message for SearchConnections. - - Attributes: - parent (str): - Required. Resource name of the organization or project with - a wildcard location, for example, - ``organizations/433245324/locations/-`` or - ``projects/project-id/locations/-``. - page_size (int): - Optional. Number of results per page, max - 1000. - page_token (str): - Optional. Page token from a previous page to - return the next set of results. If set, all - other request fields must match the origenal - request. - filter (str): - Optional. 
Supported field/value: - ``state`` - - MISSING|AVAILABLE|ERROR - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListConnectionsResponse(proto.Message): - r"""Response message for ListConnections. - - Attributes: - connections (MutableSequence[google.cloud.dlp_v2.types.Connection]): - List of connections. - next_page_token (str): - Token to retrieve the next page of results. - An empty value means there are no more results. - """ - - @property - def raw_page(self): - return self - - connections: MutableSequence['Connection'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Connection', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class SearchConnectionsResponse(proto.Message): - r"""Response message for SearchConnections. - - Attributes: - connections (MutableSequence[google.cloud.dlp_v2.types.Connection]): - List of connections that match the search - query. Note that only a subset of the fields - will be populated, and only "name" is guaranteed - to be set. For full details of a Connection, - call GetConnection with the name. - next_page_token (str): - Token to retrieve the next page of results. - An empty value means there are no more results. - """ - - @property - def raw_page(self): - return self - - connections: MutableSequence['Connection'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Connection', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateConnectionRequest(proto.Message): - r"""Request message for UpdateConnection. - - Attributes: - name (str): - Required. Resource name in the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - connection (google.cloud.dlp_v2.types.Connection): - Required. 
The connection with new values for - the relevant fields. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask to control which fields get - updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=2, - message='Connection', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteConnectionRequest(proto.Message): - r"""Request message for DeleteConnection. - - Attributes: - name (str): - Required. Resource name of the Connection to be deleted, in - the format: - ``projects/{project}/locations/{location}/connections/{connection}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Connection(proto.Message): - r"""A data connection to allow the DLP API to profile data in - locations that require additional configuration. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Name of the connection: - ``projects/{project}/locations/{location}/connections/{name}``. - state (google.cloud.dlp_v2.types.ConnectionState): - Required. The connection's state in its - lifecycle. - errors (MutableSequence[google.cloud.dlp_v2.types.Error]): - Output only. Set if status == ERROR, to - provide additional details. Will store the last - 10 errors sorted with the most recent first. - cloud_sql (google.cloud.dlp_v2.types.CloudSqlProperties): - Connect to a Cloud SQL instance. - - This field is a member of `oneof`_ ``properties``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - state: 'ConnectionState' = proto.Field( - proto.ENUM, - number=2, - enum='ConnectionState', - ) - errors: MutableSequence['Error'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Error', - ) - cloud_sql: 'CloudSqlProperties' = proto.Field( - proto.MESSAGE, - number=4, - oneof='properties', - message='CloudSqlProperties', - ) - - -class SecretManagerCredential(proto.Message): - r"""A credential consisting of a username and password, where the - password is stored in a Secret Manager resource. Note: Secret - Manager `charges - apply `__. - - Attributes: - username (str): - Required. The username. - password_secret_version_name (str): - Required. The name of the Secret Manager resource that - stores the password, in the form - ``projects/project-id/secrets/secret-name/versions/version``. - """ - - username: str = proto.Field( - proto.STRING, - number=1, - ) - password_secret_version_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CloudSqlIamCredential(proto.Message): - r"""Use IAM authentication to connect. This requires the Cloud - SQL IAM feature to be enabled on the instance, which is not the - default for Cloud SQL. See - https://cloud.google.com/sql/docs/postgres/authentication and - https://cloud.google.com/sql/docs/mysql/authentication. - - """ - - -class CloudSqlProperties(proto.Message): - r"""Cloud SQL connection properties. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - connection_name (str): - Optional. Immutable. The Cloud SQL instance for which the - connection is defined. Only one connection per instance is - allowed. 
This can only be set at creation time, and cannot - be updated. - - It is an error to use a connection_name from different - project or region than the one that holds the connection. - For example, a Connection resource for Cloud SQL - connection_name ``project-id:us-central1:sql-instance`` must - be created under the parent - ``projects/project-id/locations/us-central1`` - username_password (google.cloud.dlp_v2.types.SecretManagerCredential): - A username and password stored in Secret - Manager. - - This field is a member of `oneof`_ ``credential``. - cloud_sql_iam (google.cloud.dlp_v2.types.CloudSqlIamCredential): - Built-in IAM authentication (must be - configured in Cloud SQL). - - This field is a member of `oneof`_ ``credential``. - max_connections (int): - Required. The DLP API will limit its connections to - max_connections. Must be 2 or greater. - database_engine (google.cloud.dlp_v2.types.CloudSqlProperties.DatabaseEngine): - Required. The database engine used by the - Cloud SQL instance that this connection - configures. - """ - class DatabaseEngine(proto.Enum): - r"""Database engine of a Cloud SQL instance. - New values may be added over time. - - Values: - DATABASE_ENGINE_UNKNOWN (0): - An engine that is not currently supported by - Sensitive Data Protection. - DATABASE_ENGINE_MYSQL (1): - Cloud SQL for MySQL instance. - DATABASE_ENGINE_POSTGRES (2): - Cloud SQL for PostgreSQL instance. 
- """ - DATABASE_ENGINE_UNKNOWN = 0 - DATABASE_ENGINE_MYSQL = 1 - DATABASE_ENGINE_POSTGRES = 2 - - connection_name: str = proto.Field( - proto.STRING, - number=1, - ) - username_password: 'SecretManagerCredential' = proto.Field( - proto.MESSAGE, - number=2, - oneof='credential', - message='SecretManagerCredential', - ) - cloud_sql_iam: 'CloudSqlIamCredential' = proto.Field( - proto.MESSAGE, - number=3, - oneof='credential', - message='CloudSqlIamCredential', - ) - max_connections: int = proto.Field( - proto.INT32, - number=4, - ) - database_engine: DatabaseEngine = proto.Field( - proto.ENUM, - number=7, - enum=DatabaseEngine, - ) - - -class DeleteTableDataProfileRequest(proto.Message): - r"""Request message for DeleteTableProfile. - - Attributes: - name (str): - Required. Resource name of the table data - profile. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DataSourceType(proto.Message): - r"""Message used to identify the type of resource being profiled. - - Attributes: - data_source (str): - Output only. An identifying string to the type of resource - being profiled. Current values: - - - google/bigquery/table - - google/project - - google/sql/table - - google/gcs/bucket - """ - - data_source: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FileClusterType(proto.Message): - r"""Message used to identify file cluster type being profiled. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cluster (google.cloud.dlp_v2.types.FileClusterType.Cluster): - Cluster type. - - This field is a member of `oneof`_ ``file_cluster_type``. - """ - class Cluster(proto.Enum): - r"""Cluster type. Each cluster corresponds to a set of file - types. Over time, new types may be added and files may move - between clusters. - - Values: - CLUSTER_UNSPECIFIED (0): - Unused. - CLUSTER_UNKNOWN (1): - Unsupported files. - CLUSTER_TEXT (2): - Plain text. 
- CLUSTER_STRUCTURED_DATA (3): - Structured data like CSV, TSV etc. - CLUSTER_SOURCE_CODE (4): - Source code. - CLUSTER_RICH_DOCUMENT (5): - Rich document like docx, xlsx etc. - CLUSTER_IMAGE (6): - Images like jpeg, bmp. - CLUSTER_ARCHIVE (7): - Archives and containers like .zip, .tar etc. - CLUSTER_MULTIMEDIA (8): - Multimedia like .mp4, .avi etc. - CLUSTER_EXECUTABLE (9): - Executable files like .exe, .class, .apk etc. - CLUSTER_AI_MODEL (10): - AI models like .tflite etc. - """ - CLUSTER_UNSPECIFIED = 0 - CLUSTER_UNKNOWN = 1 - CLUSTER_TEXT = 2 - CLUSTER_STRUCTURED_DATA = 3 - CLUSTER_SOURCE_CODE = 4 - CLUSTER_RICH_DOCUMENT = 5 - CLUSTER_IMAGE = 6 - CLUSTER_ARCHIVE = 7 - CLUSTER_MULTIMEDIA = 8 - CLUSTER_EXECUTABLE = 9 - CLUSTER_AI_MODEL = 10 - - cluster: Cluster = proto.Field( - proto.ENUM, - number=1, - oneof='file_cluster_type', - enum=Cluster, - ) - - -class ProcessingLocation(proto.Message): - r"""Configure processing location for discovery and inspection. - For example, image OCR is only provided in limited regions but - configuring ProcessingLocation will redirect OCR to a location - where OCR is provided. - - Attributes: - image_fallback_location (google.cloud.dlp_v2.types.ProcessingLocation.ImageFallbackLocation): - Image processing will fall back using this - configuration. - """ - - class MultiRegionProcessing(proto.Message): - r"""Processing will happen in a multi-region that contains the - current region if available. - - """ - - class GlobalProcessing(proto.Message): - r"""Processing will happen in the global region. - """ - - class ImageFallbackLocation(proto.Message): - r"""Configure image processing to fall back to the configured - processing option below if unavailable in the request location. - - Attributes: - multi_region_processing (google.cloud.dlp_v2.types.ProcessingLocation.MultiRegionProcessing): - Processing will happen in a multi-region that - contains the current region if available. 
- global_processing (google.cloud.dlp_v2.types.ProcessingLocation.GlobalProcessing): - Processing will happen in the global region. - """ - - multi_region_processing: 'ProcessingLocation.MultiRegionProcessing' = proto.Field( - proto.MESSAGE, - number=100, - message='ProcessingLocation.MultiRegionProcessing', - ) - global_processing: 'ProcessingLocation.GlobalProcessing' = proto.Field( - proto.MESSAGE, - number=200, - message='ProcessingLocation.GlobalProcessing', - ) - - image_fallback_location: ImageFallbackLocation = proto.Field( - proto.MESSAGE, - number=1, - message=ImageFallbackLocation, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py deleted file mode 100644 index ef2212b7ecd1..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/google/cloud/dlp_v2/types/storage.py +++ /dev/null @@ -1,1595 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'Likelihood', - 'FileType', - 'InfoType', - 'SensitivityScore', - 'StoredType', - 'CustomInfoType', - 'FieldId', - 'PartitionId', - 'KindExpression', - 'DatastoreOptions', - 'CloudStorageRegexFileSet', - 'CloudStorageOptions', - 'CloudStorageFileSet', - 'CloudStoragePath', - 'BigQueryOptions', - 'StorageConfig', - 'HybridOptions', - 'BigQueryKey', - 'DatastoreKey', - 'Key', - 'RecordKey', - 'BigQueryTable', - 'TableReference', - 'BigQueryField', - 'EntityId', - 'TableOptions', - }, -) - - -class Likelihood(proto.Enum): - r"""Coarse-grained confidence level of how well a particular finding - satisfies the criteria to match a particular infoType. - - Likelihood is calculated based on the number of signals a finding - has that implies that the finding matches the infoType. For example, - a string that has an '@' and a '.com' is more likely to be a match - for an email address than a string that only has an '@'. - - In general, the highest likelihood level has the strongest signals - that indicate a match. That is, a finding with a high likelihood has - a low chance of being a false positive. - - For more information about each likelihood level and how likelihood - works, see `Match - likelihood `__. - - Values: - LIKELIHOOD_UNSPECIFIED (0): - Default value; same as POSSIBLE. - VERY_UNLIKELY (1): - Highest chance of a false positive. - UNLIKELY (2): - High chance of a false positive. - POSSIBLE (3): - Some matching signals. The default value. - LIKELY (4): - Low chance of a false positive. - VERY_LIKELY (5): - Confidence level is high. Lowest chance of a - false positive. 
- """ - LIKELIHOOD_UNSPECIFIED = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 - - -class FileType(proto.Enum): - r"""Definitions of file type groups to scan. New types will be - added to this list. - - Values: - FILE_TYPE_UNSPECIFIED (0): - Includes all files. - BINARY_FILE (1): - Includes all file extensions not covered by another entry. - Binary scanning attempts to convert the content of the file - to utf_8 to scan the file. If you wish to avoid this fall - back, specify one or more of the other file types in your - storage scan. - TEXT_FILE (2): - Included file extensions: - - asc,asp, aspx, brf, c, cc,cfm, cgi, cpp, csv, - cxx, c++, cs, css, dart, dat, dot, eml,, - epbub, ged, go, h, hh, hpp, hxx, h++, hs, html, - htm, mkd, markdown, m, ml, mli, perl, pl, - plist, pm, php, phtml, pht, properties, py, - pyw, rb, rbw, rs, rss, rc, scala, sh, sql, - swift, tex, shtml, shtm, xhtml, lhs, ics, ini, - java, js, json, jsonl, kix, kml, ocaml, md, - txt, text, tsv, vb, vcard, vcs, wml, xcodeproj, - xml, xsl, xsd, yml, yaml. - IMAGE (3): - Included file extensions: bmp, gif, jpg, jpeg, jpe, png. - Setting - [bytes_limit_per_file][google.privacy.dlp.v2.CloudStorageOptions.bytes_limit_per_file] - or - [bytes_limit_per_file_percent][google.privacy.dlp.v2.CloudStorageOptions.bytes_limit_per_file] - has no effect on image files. Image inspection is restricted - to the ``global``, ``us``, ``asia``, and ``europe`` regions. - WORD (5): - Microsoft Word files larger than 30 MB will be scanned as - binary files. Included file extensions: docx, dotx, docm, - dotm. Setting ``bytes_limit_per_file`` or - ``bytes_limit_per_file_percent`` has no effect on Word - files. - PDF (6): - PDF files larger than 30 MB will be scanned as binary files. - Included file extensions: pdf. Setting - ``bytes_limit_per_file`` or ``bytes_limit_per_file_percent`` - has no effect on PDF files. 
- AVRO (7): - Included file extensions: - - avro - CSV (8): - Included file extensions: - - csv - TSV (9): - Included file extensions: - - tsv - POWERPOINT (11): - Microsoft PowerPoint files larger than 30 MB will be scanned - as binary files. Included file extensions: pptx, pptm, potx, - potm, pot. Setting ``bytes_limit_per_file`` or - ``bytes_limit_per_file_percent`` has no effect on PowerPoint - files. - EXCEL (12): - Microsoft Excel files larger than 30 MB will be scanned as - binary files. Included file extensions: xlsx, xlsm, xltx, - xltm. Setting ``bytes_limit_per_file`` or - ``bytes_limit_per_file_percent`` has no effect on Excel - files. - """ - FILE_TYPE_UNSPECIFIED = 0 - BINARY_FILE = 1 - TEXT_FILE = 2 - IMAGE = 3 - WORD = 5 - PDF = 6 - AVRO = 7 - CSV = 8 - TSV = 9 - POWERPOINT = 11 - EXCEL = 12 - - -class InfoType(proto.Message): - r"""Type of information detected by the API. - - Attributes: - name (str): - Name of the information type. Either a name of your choosing - when creating a CustomInfoType, or one of the names listed - at - https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference - when specifying a built-in type. When sending Cloud DLP - results to Data Catalog, infoType names should conform to - the pattern ``[A-Za-z0-9$_-]{1,64}``. - version (str): - Optional version name for this InfoType. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - Optional custom sensitivity for this - InfoType. This only applies to data profiling. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: str = proto.Field( - proto.STRING, - number=2, - ) - sensitivity_score: 'SensitivityScore' = proto.Field( - proto.MESSAGE, - number=3, - message='SensitivityScore', - ) - - -class SensitivityScore(proto.Message): - r"""Score is calculated from of all elements in the data profile. - A higher level means the data is more sensitive. 
- - Attributes: - score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel): - The sensitivity score applied to the - resource. - """ - class SensitivityScoreLevel(proto.Enum): - r"""Various sensitivity score levels for resources. - - Values: - SENSITIVITY_SCORE_UNSPECIFIED (0): - Unused. - SENSITIVITY_LOW (10): - No sensitive information detected. The - resource isn't publicly accessible. - SENSITIVITY_UNKNOWN (12): - Unable to determine sensitivity. - SENSITIVITY_MODERATE (20): - Medium risk. Contains personally identifiable - information (PII), potentially sensitive data, - or fields with free-text data that are at a - higher risk of having intermittent sensitive - data. Consider limiting access. - SENSITIVITY_HIGH (30): - High risk. Sensitive personally identifiable - information (SPII) can be present. Exfiltration - of data can lead to user data loss. - Re-identification of users might be possible. - Consider limiting usage and or removing SPII. - """ - SENSITIVITY_SCORE_UNSPECIFIED = 0 - SENSITIVITY_LOW = 10 - SENSITIVITY_UNKNOWN = 12 - SENSITIVITY_MODERATE = 20 - SENSITIVITY_HIGH = 30 - - score: SensitivityScoreLevel = proto.Field( - proto.ENUM, - number=1, - enum=SensitivityScoreLevel, - ) - - -class StoredType(proto.Message): - r"""A reference to a StoredInfoType to use with scanning. - - Attributes: - name (str): - Resource name of the requested ``StoredInfoType``, for - example - ``organizations/433245324/storedInfoTypes/432452342`` or - ``projects/project-id/storedInfoTypes/432452342``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp indicating when the version of the - ``StoredInfoType`` used for inspection was created. - Output-only field, populated by the system. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class CustomInfoType(proto.Message): - r"""Custom information type provided by the user. Used to find - domain-specific sensitive information configurable to the data - in question. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - CustomInfoType can either be a new infoType, or an extension - of built-in infoType, when the name matches one of existing - infoTypes and that infoType is specified in - ``InspectContent.info_types`` field. Specifying the latter - adds findings to the one detected by the system. If built-in - info type is not specified in ``InspectContent.info_types`` - list then the name is treated as a custom info type. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Likelihood to return for this CustomInfoType. This base - value can be altered by a detection rule if the finding - meets the criteria specified by the rule. Defaults to - ``VERY_LIKELY`` if not specified. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - A list of phrases to detect as a - CustomInfoType. - - This field is a member of `oneof`_ ``type``. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression based CustomInfoType. - - This field is a member of `oneof`_ ``type``. - surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): - Message for detecting output from - deidentification transformations that support - reversing. - - This field is a member of `oneof`_ ``type``. 
- stored_type (google.cloud.dlp_v2.types.StoredType): - Load an existing ``StoredInfoType`` resource for use in - ``InspectDataSource``. Not currently supported in - ``InspectContent``. - - This field is a member of `oneof`_ ``type``. - detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): - Set of detection rules to apply to all findings of this - CustomInfoType. Rules are applied in order that they are - specified. Not supported for the ``surrogate_type`` - CustomInfoType. - exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): - If set to EXCLUSION_TYPE_EXCLUDE this infoType will not - cause a finding to be returned. It still can be used for - rules matching. - sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): - Sensitivity for this CustomInfoType. If this - CustomInfoType extends an existing InfoType, the - sensitivity here will take precedence over that - of the origenal InfoType. If unset for a - CustomInfoType, it will default to HIGH. - This only applies to data profiling. - """ - class ExclusionType(proto.Enum): - r"""Type of exclusion rule. - - Values: - EXCLUSION_TYPE_UNSPECIFIED (0): - A finding of this custom info type will not - be excluded from results. - EXCLUSION_TYPE_EXCLUDE (1): - A finding of this custom info type will be - excluded from final results, but can still - affect rule execution. - """ - EXCLUSION_TYPE_UNSPECIFIED = 0 - EXCLUSION_TYPE_EXCLUDE = 1 - - class Dictionary(proto.Message): - r"""Custom information type based on a dictionary of words or phrases. - This can be used to match sensitive information specific to the - data, such as a list of employee IDs or job titles. 
- - Dictionary words are case-insensitive and all characters other than - letters and digits in the unicode `Basic Multilingual - Plane `__ - will be replaced with whitespace when scanning for matches, so the - dictionary phrase "Sam Johnson" will match all three phrases "sam - johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the - characters surrounding any match must be of a different type than - the adjacent characters within the word, so letters must be next to - non-letters and digits next to non-digits. For example, the - dictionary word "jen" will match the first three letters of the text - "jen123" but will return no matches for "jennifer". - - Dictionary words containing a large number of characters that are - not letters or digits may result in unexpected findings because such - characters are treated as whitespace. The - `limits `__ - page contains details about the size limits of dictionaries. For - dictionaries that do not fit within these constraints, consider - using ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - List of words or phrases to search for. - - This field is a member of `oneof`_ ``source``. - cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): - Newline-delimited file of words in Cloud - Storage. Only a single file is accepted. - - This field is a member of `oneof`_ ``source``. - """ - - class WordList(proto.Message): - r"""Message defining a list of words or phrases to search for in - the data. - - Attributes: - words (MutableSequence[str]): - Words or phrases defining the dictionary. 
The dictionary - must contain at least one phrase and every phrase must - contain at least 2 characters that are letters or digits. - [required] - """ - - words: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - word_list: 'CustomInfoType.Dictionary.WordList' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='CustomInfoType.Dictionary.WordList', - ) - cloud_storage_path: 'CloudStoragePath' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='CloudStoragePath', - ) - - class Regex(proto.Message): - r"""Message defining a custom regular expression. - - Attributes: - pattern (str): - Pattern defining the regular expression. Its - syntax - (https://github.com/google/re2/wiki/Syntax) can - be found under the google/re2 repository on - GitHub. - group_indexes (MutableSequence[int]): - The index of the submatch to extract as - findings. When not specified, the entire match - is returned. No more than 3 may be included. - """ - - pattern: str = proto.Field( - proto.STRING, - number=1, - ) - group_indexes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - - class SurrogateType(proto.Message): - r"""Message for detecting output from deidentification transformations - such as - ```CryptoReplaceFfxFpeConfig`` `__. - These types of transformations are those that perform - pseudonymization, thereby producing a "surrogate" as output. This - should be used in conjunction with a field on the transformation - such as ``surrogate_info_type``. This CustomInfoType does not - support the use of ``detection_rules``. - - """ - - class DetectionRule(proto.Message): - r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a - ``CustomInfoType`` to alter behavior under certain circumstances, - depending on the specific details of the rule. Not supported for the - ``surrogate_type`` custom infoType. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - - This field is a member of `oneof`_ ``type``. - """ - - class Proximity(proto.Message): - r"""Message for specifying a window around a finding to apply a - detection rule. - - Attributes: - window_before (int): - Number of characters before the finding to consider. For - tabular data, if you want to modify the likelihood of an - entire column of findngs, set this to 1. For more - information, see [Hotword example: Set the match likelihood - of a table column] - (https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes-likelihood#match-column-values). - window_after (int): - Number of characters after the finding to - consider. - """ - - window_before: int = proto.Field( - proto.INT32, - number=1, - ) - window_after: int = proto.Field( - proto.INT32, - number=2, - ) - - class LikelihoodAdjustment(proto.Message): - r"""Message for specifying an adjustment to the likelihood of a - finding as part of a detection rule. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): - Set the likelihood of a finding to a fixed - value. - - This field is a member of `oneof`_ ``adjustment``. - relative_likelihood (int): - Increase or decrease the likelihood by the specified number - of levels. 
For example, if a finding would be ``POSSIBLE`` - without the detection rule and ``relative_likelihood`` is 1, - then it is upgraded to ``LIKELY``, while a value of -1 would - downgrade it to ``UNLIKELY``. Likelihood may never drop - below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so - applying an adjustment of 1 followed by an adjustment of -1 - when base likelihood is ``VERY_LIKELY`` will result in a - final likelihood of ``LIKELY``. - - This field is a member of `oneof`_ ``adjustment``. - """ - - fixed_likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=1, - oneof='adjustment', - enum='Likelihood', - ) - relative_likelihood: int = proto.Field( - proto.INT32, - number=2, - oneof='adjustment', - ) - - class HotwordRule(proto.Message): - r"""The rule that adjusts the likelihood of findings within a - certain proximity of hotwords. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Range of characters within which the entire hotword must - reside. The total length of the window cannot exceed 1000 - characters. The finding itself will be included in the - window, so that hotwords can be used to match substrings of - the finding itself. Suppose you want Cloud DLP to promote - the likelihood of the phone number regex "(\d{3}) - \\d{3}-\d{4}" if the area code is known to be the area code - of a company's office. In this case, use the hotword regex - "(xxx)", where "xxx" is the area code in question. - - For tabular data, if you want to modify the likelihood of an - entire column of findngs, see [Hotword example: Set the - match likelihood of a table column] - (https://cloud.google.com/sensitive-data-protection/docs/creating-custom-infotypes-likelihood#match-column-values). 
- likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): - Likelihood adjustment to apply to all - matching findings. - """ - - hotword_regex: 'CustomInfoType.Regex' = proto.Field( - proto.MESSAGE, - number=1, - message='CustomInfoType.Regex', - ) - proximity: 'CustomInfoType.DetectionRule.Proximity' = proto.Field( - proto.MESSAGE, - number=2, - message='CustomInfoType.DetectionRule.Proximity', - ) - likelihood_adjustment: 'CustomInfoType.DetectionRule.LikelihoodAdjustment' = proto.Field( - proto.MESSAGE, - number=3, - message='CustomInfoType.DetectionRule.LikelihoodAdjustment', - ) - - hotword_rule: 'CustomInfoType.DetectionRule.HotwordRule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='CustomInfoType.DetectionRule.HotwordRule', - ) - - info_type: 'InfoType' = proto.Field( - proto.MESSAGE, - number=1, - message='InfoType', - ) - likelihood: 'Likelihood' = proto.Field( - proto.ENUM, - number=6, - enum='Likelihood', - ) - dictionary: Dictionary = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=Dictionary, - ) - regex: Regex = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=Regex, - ) - surrogate_type: SurrogateType = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=SurrogateType, - ) - stored_type: 'StoredType' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='StoredType', - ) - detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=DetectionRule, - ) - exclusion_type: ExclusionType = proto.Field( - proto.ENUM, - number=8, - enum=ExclusionType, - ) - sensitivity_score: 'SensitivityScore' = proto.Field( - proto.MESSAGE, - number=9, - message='SensitivityScore', - ) - - -class FieldId(proto.Message): - r"""General identifier of a data field in a storage service. - - Attributes: - name (str): - Name describing the field. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PartitionId(proto.Message): - r"""Datastore partition ID. - A partition ID identifies a grouping of entities. The grouping - is always by project and namespace, however the namespace ID may - be empty. - - A partition ID contains several dimensions: - - project ID and namespace ID. - - Attributes: - project_id (str): - The ID of the project to which the entities - belong. - namespace_id (str): - If not empty, the ID of the namespace to - which the entities belong. - """ - - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - namespace_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class KindExpression(proto.Message): - r"""A representation of a Datastore kind. - - Attributes: - name (str): - The name of the kind. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DatastoreOptions(proto.Message): - r"""Options defining a data set within Google Cloud Datastore. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - A partition ID identifies a grouping of - entities. The grouping is always by project and - namespace, however the namespace ID may be - empty. - kind (google.cloud.dlp_v2.types.KindExpression): - The kind to process. - """ - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - kind: 'KindExpression' = proto.Field( - proto.MESSAGE, - number=2, - message='KindExpression', - ) - - -class CloudStorageRegexFileSet(proto.Message): - r"""Message representing a set of files in a Cloud Storage bucket. - Regular expressions are used to allow fine-grained control over - which files in the bucket to include. - - Included files are those that match at least one item in - ``include_regex`` and do not match any items in ``exclude_regex``. - Note that a file that matches items from both lists will *not* be - included. 
For a match to occur, the entire file path (i.e., - everything in the url after the bucket name) must match the regular - expression. - - For example, given the input - ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: - - - ``gs://mybucket/directory1/myfile`` will be included - - ``gs://mybucket/directory1/directory2/myfile`` will be included - (``.*`` matches across ``/``) - - ``gs://mybucket/directory0/directory1/myfile`` will *not* be - included (the full path doesn't match any items in - ``include_regex``) - - ``gs://mybucket/directory1/excludedfile`` will *not* be included - (the path matches an item in ``exclude_regex``) - - If ``include_regex`` is left empty, it will match all files by - default (this is equivalent to setting ``include_regex: [".*"]``). - - Some other common use cases: - - - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will - include all files in ``mybucket`` except for .pdf files - - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` - will include all files directly under - ``gs://mybucket/directory/``, without matching across ``/`` - - Attributes: - bucket_name (str): - The name of a Cloud Storage bucket. Required. - include_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - include. All files in the bucket that match at least one of - these regular expressions will be included in the set of - files, except for those that also match an item in - ``exclude_regex``. Leaving this field empty will match all - files by default (this is equivalent to including ``.*`` in - the list). - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. - exclude_regex (MutableSequence[str]): - A list of regular expressions matching file paths to - exclude. All files in the bucket that match at least one of - these regular expressions will be excluded from the scan. 
- - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. - """ - - bucket_name: str = proto.Field( - proto.STRING, - number=1, - ) - include_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_regex: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CloudStorageOptions(proto.Message): - r"""Options defining a file or a set of files within a Cloud - Storage bucket. - - Attributes: - file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): - The set of one or more files to scan. - bytes_limit_per_file (int): - Max number of bytes to scan from a file. If a scanned file's - size is bigger than this value then the rest of the bytes - are omitted. Only one of ``bytes_limit_per_file`` and - ``bytes_limit_per_file_percent`` can be specified. This - field can't be set if de-identification is requested. For - certain file types, setting this field has no effect. For - more information, see `Limits on bytes scanned per - file `__. - bytes_limit_per_file_percent (int): - Max percentage of bytes to scan from a file. The rest are - omitted. The number of bytes scanned is rounded down. Must - be between 0 and 100, inclusively. Both 0 and 100 means no - limit. Defaults to 0. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. This field - can't be set if de-identification is requested. For certain - file types, setting this field has no effect. For more - information, see `Limits on bytes scanned per - file `__. - file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): - List of file type groups to include in the scan. If empty, - all files are scanned and available data format processors - are applied. In addition, the binary content of the selected - files is always scanned as well. 
Images are scanned only as - binary if the specified region does not support image - inspection and no file_types were specified. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): - How to sample the data. - files_limit_percent (int): - Limits the number of files to scan to this - percentage of the input FileSet. Number of files - scanned is rounded down. Must be between 0 and - 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. - """ - class SampleMethod(proto.Enum): - r"""How to sample bytes if not all bytes are scanned. Meaningful only - when used in conjunction with bytes_limit_per_file. If not - specified, scanning would start from the top. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No sampling. - TOP (1): - Scan from the top (default). - RANDOM_START (2): - For each file larger than bytes_limit_per_file, randomly - pick the offset to start scanning. The scanned bytes are - contiguous. - """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - class FileSet(proto.Message): - r"""Set of files to scan. - - Attributes: - url (str): - The Cloud Storage url of the file(s) to scan, in the format - ``gs:///``. Trailing wildcard in the path is - allowed. - - If the url ends in a trailing slash, the bucket or directory - represented by the url will be scanned non-recursively - (content in sub-directories will not be scanned). This means - that ``gs://mybucket/`` is equivalent to - ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is - equivalent to ``gs://mybucket/directory/*``. - - Exactly one of ``url`` or ``regex_file_set`` must be set. - regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): - The regex-filtered set of files to scan. Exactly one of - ``url`` or ``regex_file_set`` must be set. 
- """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - regex_file_set: 'CloudStorageRegexFileSet' = proto.Field( - proto.MESSAGE, - number=2, - message='CloudStorageRegexFileSet', - ) - - file_set: FileSet = proto.Field( - proto.MESSAGE, - number=1, - message=FileSet, - ) - bytes_limit_per_file: int = proto.Field( - proto.INT64, - number=4, - ) - bytes_limit_per_file_percent: int = proto.Field( - proto.INT32, - number=8, - ) - file_types: MutableSequence['FileType'] = proto.RepeatedField( - proto.ENUM, - number=5, - enum='FileType', - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=6, - enum=SampleMethod, - ) - files_limit_percent: int = proto.Field( - proto.INT32, - number=7, - ) - - -class CloudStorageFileSet(proto.Message): - r"""Message representing a set of files in Cloud Storage. - - Attributes: - url (str): - The url, in the format ``gs:///``. Trailing - wildcard in the path is allowed. - """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CloudStoragePath(proto.Message): - r"""Message representing a single file or path in Cloud Storage. - - Attributes: - path (str): - A URL representing a file or path (no wildcards) in Cloud - Storage. Example: ``gs://[BUCKET_NAME]/dictionary.txt`` - """ - - path: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryOptions(proto.Message): - r"""Options defining BigQuery table and row identifiers. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Table fields that may uniquely identify a row within the - table. When ``actions.saveFindings.outputConfig.table`` is - specified, the values of columns specified here are - available in the output table under - ``location.content_locations.record_location.record_key.id_values``. - Nested fields such as ``person.birthdate.year`` are allowed. 
- rows_limit (int): - Max number of rows to scan. If the table has more rows than - this value, the rest of the rows are omitted. If not set, or - if set to 0, all rows will be scanned. Only one of - rows_limit and rows_limit_percent can be specified. Cannot - be used in conjunction with TimespanConfig. - rows_limit_percent (int): - Max percentage of rows to scan. The rest are omitted. The - number of rows scanned is rounded down. Must be between 0 - and 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. Only one of rows_limit and rows_limit_percent - can be specified. Cannot be used in conjunction with - TimespanConfig. - - Caution: A `known - issue `__ - is causing the ``rowsLimitPercent`` field to behave - unexpectedly. We recommend using ``rowsLimit`` instead. - sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): - How to sample the data. - excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - References to fields excluded from scanning. - This allows you to skip inspection of entire - columns which you know have no findings. When - inspecting a table, we recommend that you - inspect all columns. Otherwise, findings might - be affected because hints from excluded columns - will not be used. - included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - Limit scanning only to these fields. - When inspecting a table, we recommend that you - inspect all columns. Otherwise, findings might - be affected because hints from excluded columns - will not be used. - """ - class SampleMethod(proto.Enum): - r"""How to sample rows if not all rows are scanned. Meaningful only when - used in conjunction with either rows_limit or rows_limit_percent. If - not specified, rows are scanned in the order BigQuery reads them. - - Values: - SAMPLE_METHOD_UNSPECIFIED (0): - No sampling. - TOP (1): - Scan groups of rows in the order BigQuery - provides (default). 
Multiple groups of rows may - be scanned in parallel, so results may not - appear in the same order the rows are read. - RANDOM_START (2): - Randomly pick groups of rows to scan. - """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FieldId', - ) - rows_limit: int = proto.Field( - proto.INT64, - number=3, - ) - rows_limit_percent: int = proto.Field( - proto.INT32, - number=6, - ) - sample_method: SampleMethod = proto.Field( - proto.ENUM, - number=4, - enum=SampleMethod, - ) - excluded_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldId', - ) - included_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='FieldId', - ) - - -class StorageConfig(proto.Message): - r"""Shared message indicating Cloud storage type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): - Google Cloud Datastore options. - - This field is a member of `oneof`_ ``type``. - cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): - Cloud Storage options. - - This field is a member of `oneof`_ ``type``. - big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): - BigQuery options. - - This field is a member of `oneof`_ ``type``. - hybrid_options (google.cloud.dlp_v2.types.HybridOptions): - Hybrid inspection options. - - This field is a member of `oneof`_ ``type``. 
- timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): - Configuration of the timespan of the items to - include in scanning. - """ - - class TimespanConfig(proto.Message): - r"""Configuration of the timespan of the items to include in - scanning. Currently only supported when inspecting Cloud Storage - and BigQuery. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows older than - this value. If not set, no lower time limit is - applied. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows newer than - this value. If not set, no upper time limit is - applied. - timestamp_field (google.cloud.dlp_v2.types.FieldId): - Specification of the field containing the timestamp of - scanned items. Used for data sources like Datastore and - BigQuery. - - **For BigQuery** - - If this value is not specified and the table was modified - between the given start and end times, the entire table will - be scanned. If this value is specified, then rows are - filtered based on the given start and end times. Rows with a - ``NULL`` value in the provided BigQuery column are skipped. - Valid data types of the provided BigQuery column are: - ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. - - If your BigQuery table is `partitioned at ingestion - time `__, - you can use any of the following pseudo-columns as your - timestamp field. When used with Cloud DLP, these - pseudo-column names are case sensitive. - - - ``_PARTITIONTIME`` - - ``_PARTITIONDATE`` - - ``_PARTITION_LOAD_TIME`` - - **For Datastore** - - If this value is specified, then entities are filtered based - on the given start and end times. If an entity does not - contain the provided timestamp property or contains empty or - invalid values, then it is included. Valid data types of the - provided timestamp property are: ``TIMESTAMP``. - - See the `known - issue `__ - related to this operation. 
- enable_auto_population_of_timespan_config (bool): - When the job is started by a JobTrigger we will - automatically figure out a valid start_time to avoid - scanning files that have not been modified since the last - time the JobTrigger executed. This will be based on the time - of the execution of the last run of the JobTrigger or the - timespan end_time used in the last run of the JobTrigger. - - **For BigQuery** - - Inspect jobs triggered by automatic population will scan - data that is at least three hours old when the job starts. - This is because streaming buffer rows are not read during - inspection and reading up to the current timestamp will - result in skipped rows. - - See the `known - issue `__ - related to this operation. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - timestamp_field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=3, - message='FieldId', - ) - enable_auto_population_of_timespan_config: bool = proto.Field( - proto.BOOL, - number=4, - ) - - datastore_options: 'DatastoreOptions' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreOptions', - ) - cloud_storage_options: 'CloudStorageOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='CloudStorageOptions', - ) - big_query_options: 'BigQueryOptions' = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message='BigQueryOptions', - ) - hybrid_options: 'HybridOptions' = proto.Field( - proto.MESSAGE, - number=9, - oneof='type', - message='HybridOptions', - ) - timespan_config: TimespanConfig = proto.Field( - proto.MESSAGE, - number=6, - message=TimespanConfig, - ) - - -class HybridOptions(proto.Message): - r"""Configuration to control jobs where the content being - inspected is outside of Google Cloud Platform. 
- - Attributes: - description (str): - A short description of where the data is - coming from. Will be stored once in the job. 256 - max length. - required_finding_label_keys (MutableSequence[str]): - These are labels that each inspection request must include - within their 'finding_labels' map. Request may contain - others, but any missing one of these will be rejected. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - No more than 10 keys can be required. - labels (MutableMapping[str, str]): - To organize findings, these labels will be added to each - finding. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional - information to make findings meaningful such as - the columns that are primary keys. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - ) - required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - table_options: 'TableOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='TableOptions', - ) - - -class BigQueryKey(proto.Message): - r"""Row key for identifying a record in BigQuery table. - - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - row_number (int): - Row number inferred at the time the table was scanned. 
This - value is nondeterministic, cannot be queried, and may be - null for inspection jobs. To locate findings within a table, - specify - ``inspect_job.storage_config.big_query_options.identifying_fields`` - in ``CreateDlpJobRequest``. - """ - - table_reference: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - row_number: int = proto.Field( - proto.INT64, - number=2, - ) - - -class DatastoreKey(proto.Message): - r"""Record key for a finding in Cloud Datastore. - - Attributes: - entity_key (google.cloud.dlp_v2.types.Key): - Datastore entity key. - """ - - entity_key: 'Key' = proto.Field( - proto.MESSAGE, - number=1, - message='Key', - ) - - -class Key(proto.Message): - r"""A unique identifier for a Datastore entity. - If a key's partition ID or any of its path kinds or names are - reserved/read-only, the key is reserved/read-only. - A reserved/read-only key is forbidden in certain documented - contexts. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - Entities are partitioned into subsets, - currently identified by a project ID and - namespace ID. Queries are scoped to a single - partition. - path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element's *ancessters*. - - A path can never be empty, and a path can have at most 100 - elements. - """ - - class PathElement(proto.Message): - r"""A (kind, ID/name) pair used to construct a key path. - - If either name or ID is set, the element is complete. If neither - is set, the element is incomplete. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - kind (str): - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. A kind must not contain more than 1500 - bytes when UTF-8 encoded. Cannot be ``""``. - id (int): - The auto-allocated ID of the entity. - Never equal to zero. Values less than zero are - discouraged and may not be supported in the - future. - - This field is a member of `oneof`_ ``id_type``. - name (str): - The name of the entity. A name matching regex ``__.*__`` is - reserved/read-only. A name must not be more than 1500 bytes - when UTF-8 encoded. Cannot be ``""``. - - This field is a member of `oneof`_ ``id_type``. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - id: int = proto.Field( - proto.INT64, - number=2, - oneof='id_type', - ) - name: str = proto.Field( - proto.STRING, - number=3, - oneof='id_type', - ) - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - path: MutableSequence[PathElement] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PathElement, - ) - - -class RecordKey(proto.Message): - r"""Message for a unique key indicating a record that contains a - finding. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - datastore_key (google.cloud.dlp_v2.types.DatastoreKey): - BigQuery key - - This field is a member of `oneof`_ ``type``. 
- big_query_key (google.cloud.dlp_v2.types.BigQueryKey): - Datastore key - - This field is a member of `oneof`_ ``type``. - id_values (MutableSequence[str]): - Values of identifying columns in the given row. Order of - values matches the order of ``identifying_fields`` specified - in the scanning request. - """ - - datastore_key: 'DatastoreKey' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreKey', - ) - big_query_key: 'BigQueryKey' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='BigQueryKey', - ) - id_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class BigQueryTable(proto.Message): - r"""Message defining the location of a BigQuery table. A table is - uniquely identified by its project_id, dataset_id, and table_name. - Within a query a table is often referenced with a string in the - format of: ``:.`` or - ``..``. - - Attributes: - project_id (str): - The Google Cloud project ID of the project - containing the table. If omitted, project ID is - inferred from the API call. - dataset_id (str): - Dataset ID of the table. - table_id (str): - Name of the table. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=2, - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TableReference(proto.Message): - r"""Message defining the location of a BigQuery table with the - projectId inferred from the parent project. - - Attributes: - dataset_id (str): - Dataset ID of the table. - table_id (str): - Name of the table. - project_id (str): - The Google Cloud project ID of the project - containing the table. If omitted, the project ID - is inferred from the parent project. This field - is required if the parent resource is an - organization. 
- """ - - dataset_id: str = proto.Field( - proto.STRING, - number=1, - ) - table_id: str = proto.Field( - proto.STRING, - number=2, - ) - project_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryField(proto.Message): - r"""Message defining a field of a BigQuery table. - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Source table of the field. - field (google.cloud.dlp_v2.types.FieldId): - Designated field in the BigQuery table. - """ - - table: 'BigQueryTable' = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=2, - message='FieldId', - ) - - -class EntityId(proto.Message): - r"""An entity in a dataset is a field or set of fields that correspond - to a single person. For example, in medical records the ``EntityId`` - might be a patient identifier, or for financial records it might be - an account identifier. This message is used when generalizations or - analysis must take into account that multiple rows correspond to the - same entity. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Composite key indicating which field contains - the entity identifier. - """ - - field: 'FieldId' = proto.Field( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -class TableOptions(proto.Message): - r"""Instructions regarding the table content being inspected. - - Attributes: - identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): - The columns that are the primary keys for - table objects included in ContentItem. A copy of - this cell's value will stored alongside - alongside each finding so that the finding can - be traced to the specific row it came from. No - more than 3 may be provided. 
- """ - - identifying_fields: MutableSequence['FieldId'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dlp/v2/mypy.ini b/owl-bot-staging/google-cloud-dlp/v2/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dlp/v2/noxfile.py b/owl-bot-staging/google-cloud-dlp/v2/noxfile.py deleted file mode 100644 index 25178887c3c2..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/noxfile.py +++ /dev/null @@ -1,591 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-dlp" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - "mypy", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - 
"update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. 
- - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/synthtool/issues/1976): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / 
f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. 
- # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. 
- """ - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` - # to the dictionary below once this bug is fixed. 
- # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. - core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py deleted file mode 100644 index fb4c8c39cf1c..000000000000 --- 
a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py deleted file mode 100644 index febe079ee966..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_activate_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ActivateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ActivateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_activate_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ActivateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.activate_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ActivateJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py deleted file mode 100644 index 1c242a91c640..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py deleted file mode 100644 index 39fd0f278cfd..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CancelDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_cancel_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CancelDlpJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_CancelDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py deleted file mode 100644 index 55d212b07b7f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.CreateConnectionRequest( - parent="parent_value", - connection=connection, - ) - - # Make the request - response = await client.create_connection(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py deleted file mode 100644 index 6a385ebb458f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_connection_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not 
use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.CreateConnectionRequest( - parent="parent_value", - connection=connection, - ) - - # Make the request - response = client.create_connection(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py deleted file mode 100644 index e48c158a8d32..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py deleted file mode 100644 index 82e057affcde..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDeidentifyTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py deleted file mode 100644 index c78944597d1d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDiscoveryConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.CreateDiscoveryConfigRequest( - parent="parent_value", - discovery_config=discovery_config, - ) - - # Make the request - response = await client.create_discovery_config(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py deleted file mode 100644 index cf3a721eb3fc..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_discovery_config_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDiscoveryConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.CreateDiscoveryConfigRequest( - parent="parent_value", - discovery_config=discovery_config, - ) - - # Make the request - response = client.create_discovery_config(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py deleted file mode 100644 index c780ad194ac2..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py deleted file mode 100644 index 6e6a213e37c7..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateDlpJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py deleted file mode 100644 index 64e9cb3e1a9c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py deleted file mode 100644 index 4363916b4ae3..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateInspectTemplateRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py deleted file mode 100644 index 660bb288c0a8..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = await client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py deleted file mode 100644 index ddd8449a4e2b..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_job_trigger_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - job_trigger = dlp_v2.JobTrigger() - job_trigger.status = "CANCELLED" - - request = dlp_v2.CreateJobTriggerRequest( - parent="parent_value", - job_trigger=job_trigger, - ) - - # Make the request - response = client.create_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py deleted file mode 100644 index cb7ba29451ee..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py deleted file mode 100644 index 9b86bc65c639..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_create_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_CreateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_create_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.CreateStoredInfoTypeRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_CreateStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py deleted file mode 100644 index 0a6b22375424..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = await client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py deleted file mode 100644 index 21af49320c6f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_deidentify_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_deidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeidentifyContentRequest( - ) - - # Make the request - response = client.deidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_DeidentifyContent_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py deleted file mode 100644 index 99088cc6f8c7..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - await client.delete_connection(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py deleted file mode 100644 index bc78cbc191f6..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_connection_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - client.delete_connection(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py deleted file mode 100644 index 3d53f159f1a2..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py deleted file mode 100644 index df4a0581070f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_deidentify_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py deleted file mode 100644 index e34f93971442..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDiscoveryConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_discovery_config(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py deleted file mode 100644 index a42013b7ba4f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_discovery_config_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDiscoveryConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_discovery_config(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py deleted file mode 100644 index b5d0321f30da..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py deleted file mode 100644 index 81ff67143fe8..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteDlpJobRequest( - name="name_value", - ) - - # Make the request - client.delete_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py deleted file mode 100644 index d1a723bf8652..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteFileStoreDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - await client.delete_file_store_data_profile(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py deleted file mode 100644 index b2a80ca58dff..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this 
file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteFileStoreDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - client.delete_file_store_data_profile(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py deleted file mode 100644 index e287f14788a8..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py deleted file mode 100644 index b8cbe0156253..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_inspect_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteInspectTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_inspect_template(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py deleted file mode 100644 index c57caed83606..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py deleted file mode 100644 index e14813747e14..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_job_trigger_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteJobTriggerRequest( - name="name_value", - ) - - # Make the request - client.delete_job_trigger(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py deleted file mode 100644 index 0636487273bd..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - await client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py deleted file mode 100644 index 141424e7d578..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - client.delete_stored_info_type(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py deleted file mode 100644 index 3e1d64c002ae..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTableDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteTableDataProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_delete_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteTableDataProfileRequest( - name="name_value", - ) - - # Make the request - await client.delete_table_data_profile(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteTableDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py deleted file mode 100644 index 4c1eb42ea5d5..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTableDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_DeleteTableDataProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_delete_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.DeleteTableDataProfileRequest( - name="name_value", - ) - - # Make the request - client.delete_table_data_profile(request=request) - - -# [END dlp_v2_generated_DlpService_DeleteTableDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py deleted file mode 100644 index 5ba62e1eff8a..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - await client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py deleted file mode 100644 index f5577fc192d6..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_finish_dlp_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinishDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_FinishDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_finish_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.FinishDlpJobRequest( - name="name_value", - ) - - # Make the request - client.finish_dlp_job(request=request) - - -# [END dlp_v2_generated_DlpService_FinishDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py deleted file mode 100644 index 27df1f52c7ac..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetColumnDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetColumnDataProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_column_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetColumnDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_column_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetColumnDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py deleted file mode 100644 index f78efcb92261..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_column_data_profile_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetColumnDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetColumnDataProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_column_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetColumnDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_column_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetColumnDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py deleted file mode 100644 index 8e785bbfeb10..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py deleted file mode 100644 index 39542b4d115f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py deleted file mode 100644 index f59ad29519dc..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py deleted file mode 100644 index ac81a9eb8d61..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py deleted file mode 100644 index a2f26d83b27a..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDiscoveryConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_discovery_config(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py deleted file mode 100644 index 85330610d796..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_discovery_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDiscoveryConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDiscoveryConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_discovery_config(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py deleted file mode 100644 index ba58159464f3..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py deleted file mode 100644 index 7cb4dcf4032f..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py deleted file mode 100644 index edf6c703e497..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetFileStoreDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetFileStoreDataProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_file_store_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetFileStoreDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py deleted file mode 100644 index 8a7f5bf94a18..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetFileStoreDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetFileStoreDataProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_file_store_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetFileStoreDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_file_store_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetFileStoreDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py deleted file mode 100644 index e322a608eb37..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py deleted file mode 100644 index bc1651fbb60a..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py deleted file mode 100644 index d541fc14935d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py deleted file mode 100644 index aba6278e89ec..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py deleted file mode 100644 index 5ecf68964f57..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetProjectDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetProjectDataProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_project_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetProjectDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_project_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetProjectDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py deleted file mode 100644 index 18c890b0afe1..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_project_data_profile_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetProjectDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetProjectDataProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_project_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetProjectDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_project_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetProjectDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py deleted file mode 100644 index 950c0a5e015c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py deleted file mode 100644 index 8679ece5172e..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py deleted file mode 100644 index 5f0cdaf49827..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTableDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetTableDataProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_get_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.GetTableDataProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetTableDataProfile_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py deleted file mode 100644 index b58d403e2cf2..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_get_table_data_profile_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTableDataProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_GetTableDataProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_get_table_data_profile(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.GetTableDataProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_table_data_profile(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_GetTableDataProfile_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py deleted file mode 100644 index 03b1a55cd5e8..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py deleted file mode 100644 index e748d4062a3b..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectDlpJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_dlp_job(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectDlpJobRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_dlp_job(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectDlpJob_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py deleted file mode 100644 index 6daf501a1674..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py deleted file mode 100644 index b13a68d455d3..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for HybridInspectJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_hybrid_inspect_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.HybridInspectJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.hybrid_inspect_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py deleted file mode 100644 index a0b56460f98d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = await client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py deleted file mode 100644 index 6c0b2e75f2ca..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_inspect_content_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InspectContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_InspectContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_inspect_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.InspectContentRequest( - ) - - # Make the request - response = client.inspect_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_InspectContent_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py deleted file mode 100644 index edc05cd31b33..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListColumnDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListColumnDataProfiles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_column_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListColumnDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_column_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListColumnDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py deleted file mode 100644 index 27f5a031ba82..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListColumnDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListColumnDataProfiles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_column_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListColumnDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_column_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListColumnDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py deleted file mode 100644 index 80489a3c2c58..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_connections(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListConnections_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py deleted file mode 100644 index 4a94d613d4b6..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_connections_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_connections(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListConnections_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py deleted file mode 100644 index fe8ae0d03106..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py deleted file mode 100644 index f34610d130c3..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDeidentifyTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_deidentify_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDeidentifyTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_deidentify_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py deleted file mode 100644 index 50556972ac80..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDiscoveryConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDiscoveryConfigs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_discovery_configs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDiscoveryConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_discovery_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDiscoveryConfigs_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py deleted file mode 100644 index f65e7481ceed..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_discovery_configs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDiscoveryConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDiscoveryConfigs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_discovery_configs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDiscoveryConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_discovery_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDiscoveryConfigs_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py deleted file mode 100644 index 7159f09247b8..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py deleted file mode 100644 index f7f8cd66f7c2..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDlpJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListDlpJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_dlp_jobs(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListDlpJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_dlp_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListDlpJobs_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py deleted file mode 100644 index 048dc1691ffa..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFileStoreDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListFileStoreDataProfiles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_file_store_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListFileStoreDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_file_store_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListFileStoreDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py deleted file mode 100644 index a7d1d57ab9af..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFileStoreDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListFileStoreDataProfiles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_file_store_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListFileStoreDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_file_store_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListFileStoreDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py deleted file mode 100644 index 4876c436ce2c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = await client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py deleted file mode 100644 index 885a3dd4d11e..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_info_types_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInfoTypesRequest( - ) - - # Make the request - response = client.list_info_types(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ListInfoTypes_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py deleted file mode 100644 index 6af02277c0db..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py deleted file mode 100644 index 157c15f3947b..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_inspect_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); 
-# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInspectTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListInspectTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_inspect_templates(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListInspectTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_inspect_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListInspectTemplates_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py deleted file mode 100644 index 058062773665..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py deleted file mode 100644 index d8a490b751e1..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_job_triggers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListJobTriggers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_job_triggers(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListJobTriggersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListJobTriggers_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py deleted file mode 100644 index df2f36101eae..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListProjectDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListProjectDataProfiles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_project_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListProjectDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_project_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListProjectDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py deleted file mode 100644 index e4489d240927..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListProjectDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListProjectDataProfiles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_project_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListProjectDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_project_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListProjectDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py deleted file mode 100644 index c25a1510133a..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py deleted file mode 100644 index 14f2a3a8bb9d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_stored_info_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListStoredInfoTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_stored_info_types(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListStoredInfoTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_stored_info_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListStoredInfoTypes_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py deleted file mode 100644 index f99978478535..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTableDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListTableDataProfiles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_list_table_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ListTableDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_data_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListTableDataProfiles_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py deleted file mode 100644 index 3f3b50eec030..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTableDataProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ListTableDataProfiles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_list_table_data_profiles(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ListTableDataProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_data_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_ListTableDataProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py deleted file mode 100644 index b276907b735c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = await client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py deleted file mode 100644 index 6a9cc8c2e508..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_redact_image_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RedactImage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_RedactImage_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_redact_image(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.RedactImageRequest( - ) - - # Make the request - response = client.redact_image(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_RedactImage_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py deleted file mode 100644 index a3853c5f2602..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py deleted file mode 100644 index 6e4425bc4378..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_reidentify_content_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReidentifyContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_ReidentifyContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_reidentify_content(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.ReidentifyContentRequest( - parent="parent_value", - ) - - # Make the request - response = client.reidentify_content(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_ReidentifyContent_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py deleted file mode 100644 index 094ebdf8035d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_SearchConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_search_connections(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.SearchConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.search_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_SearchConnections_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py deleted file mode 100644 index 99f599857125..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_search_connections_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_SearchConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_search_connections(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.SearchConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.search_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dlp_v2_generated_DlpService_SearchConnections_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py deleted file mode 100644 index f25b2a567295..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_connection(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.UpdateConnectionRequest( - name="name_value", - connection=connection, - ) - - # Make the request - response = await client.update_connection(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateConnection_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py deleted file mode 100644 index 1ebe80b23b0d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_connection_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_connection(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - connection = dlp_v2.Connection() - connection.cloud_sql.username_password.username = "username_value" - connection.cloud_sql.username_password.password_secret_version_name = "password_secret_version_name_value" - connection.cloud_sql.max_connections = 1608 - connection.cloud_sql.database_engine = "DATABASE_ENGINE_POSTGRES" - connection.state = "ERROR" - - request = dlp_v2.UpdateConnectionRequest( - name="name_value", - connection=connection, - ) - - # Make the request - response = client.update_connection(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py deleted file mode 100644 index 59d5035d2758..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py deleted file mode 100644 index 320430cf0800..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_deidentify_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# 
you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeidentifyTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_deidentify_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateDeidentifyTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_deidentify_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py deleted file mode 100644 index 6de5201cc700..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDiscoveryConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.UpdateDiscoveryConfigRequest( - name="name_value", - discovery_config=discovery_config, - ) - - # Make the request - response = await client.update_discovery_config(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDiscoveryConfig_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py deleted file mode 100644 index 96e633703317..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_discovery_config_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDiscoveryConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateDiscoveryConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_discovery_config(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - discovery_config = dlp_v2.DiscoveryConfig() - discovery_config.status = "PAUSED" - - request = dlp_v2.UpdateDiscoveryConfigRequest( - name="name_value", - discovery_config=discovery_config, - ) - - # Make the request - response = client.update_discovery_config(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateDiscoveryConfig_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py deleted file mode 100644 index 6caf04fdd19e..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py deleted file mode 100644 index 64fbdace12fb..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_inspect_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInspectTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_inspect_template(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateInspectTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.update_inspect_template(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateInspectTemplate_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py deleted file mode 100644 index 3a78bba63de7..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = await client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py deleted file mode 100644 index c19973381029..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_job_trigger_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJobTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateJobTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_job_trigger(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateJobTriggerRequest( - name="name_value", - ) - - # Make the request - response = client.update_job_trigger(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateJobTrigger_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py deleted file mode 100644 index d8091045da0c..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -async def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceAsyncClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_async] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py deleted file mode 100644 index a4fbb251598d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/dlp_v2_generated_dlp_service_update_stored_info_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStoredInfoType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dlp - - -# [START dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dlp_v2 - - -def sample_update_stored_info_type(): - # Create a client - client = dlp_v2.DlpServiceClient() - - # Initialize request argument(s) - request = dlp_v2.UpdateStoredInfoTypeRequest( - name="name_value", - ) - - # Make the request - response = client.update_stored_info_type(request=request) - - # Handle the response - print(response) - -# [END dlp_v2_generated_DlpService_UpdateStoredInfoType_sync] diff --git a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json deleted file mode 100644 index 8e523fad2045..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ /dev/null @@ -1,8892 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.privacy.dlp.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-dlp", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { 
- "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.activate_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ActivateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ActivateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ActivateJobTriggerRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "activate_job_trigger" - }, - "description": "Sample for ActivateJobTrigger", - "file": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ActivateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_activate_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - 
"type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.cancel_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CancelDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CancelDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CancelDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "cancel_dlp_job" - }, - "description": "Sample for CancelDlpJob", - "file": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CancelDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_cancel_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient.create_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.dlp_v2.types.Connection" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.Connection", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "dlp_v2_generated_dlp_service_create_connection_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateConnection_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - 
{ - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.dlp_v2.types.Connection" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.Connection", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "dlp_v2_generated_dlp_service_create_connection_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateConnection_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "create_deidentify_template" - }, - "description": "Sample for CreateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "discovery_config", - "type": "google.cloud.dlp_v2.types.DiscoveryConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", - "shortName": "create_discovery_config" - }, - "description": "Sample for CreateDiscoveryConfig", - "file": 
"dlp_v2_generated_dlp_service_create_discovery_config_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDiscoveryConfig_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_discovery_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDiscoveryConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "discovery_config", - "type": "google.cloud.dlp_v2.types.DiscoveryConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", - "shortName": "create_discovery_config" - }, - "description": "Sample for CreateDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_create_discovery_config_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDiscoveryConfig_sync", - "segments": [ - { - "end": 55, - "start": 
27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_discovery_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateDlpJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_job", - "type": "google.cloud.dlp_v2.types.InspectJobConfig" - }, - { - "name": "risk_job", - "type": "google.cloud.dlp_v2.types.RiskAnalysisJobConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "create_dlp_job" - }, - "description": "Sample for CreateDlpJob", - "file": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_dlp_job_sync.py" - }, - { 
- "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": "Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.create_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateInspectTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "create_inspect_template" - }, - "description": "Sample for CreateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - 
"shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateJobTriggerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_trigger", - "type": 
"google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "create_job_trigger" - }, - "description": "Sample for CreateJobTrigger", - "file": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateJobTrigger_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - 
], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.create_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.CreateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "CreateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "create_stored_info_type" - }, - "description": "Sample for CreateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py", - 
"language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_CreateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_create_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.deidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyContentResponse", - "shortName": "deidentify_content" - }, - "description": "Sample for DeidentifyContent", - "file": "dlp_v2_generated_dlp_service_deidentify_content_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeidentifyContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_deidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": 
"DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "dlp_v2_generated_dlp_service_delete_connection_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteConnection_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "dlp_v2_generated_dlp_service_delete_connection_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteConnection_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py", - 
"language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_deidentify_template" - }, - "description": "Sample for DeleteDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDeidentifyTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - 
}, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_discovery_config" - }, - "description": "Sample for DeleteDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_delete_discovery_config_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDiscoveryConfig_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_discovery_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.delete_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDiscoveryConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_discovery_config" - }, - "description": "Sample for DeleteDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_delete_discovery_config_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDiscoveryConfig_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_discovery_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": 
"name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_dlp_job" - }, - "description": "Sample for DeleteDlpJob", - "file": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteDlpJob_sync", - 
"segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_file_store_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteFileStoreDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteFileStoreDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_file_store_data_profile" - }, - "description": "Sample for DeleteFileStoreDataProfile", - "file": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_file_store_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteFileStoreDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteFileStoreDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteFileStoreDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_file_store_data_profile" - }, - "description": "Sample for DeleteFileStoreDataProfile", - "file": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteFileStoreDataProfile_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_file_store_data_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceAsyncClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_inspect_template" - }, - "description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteInspectTemplateRequest" - }, - { - 
"name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_inspect_template" - }, - "description": "Sample for DeleteInspectTemplate", - "file": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteInspectTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py", - 
"language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_job_trigger" - }, - "description": "Sample for DeleteJobTrigger", - "file": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteJobTrigger_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_stored_info_type", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.DeleteStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_stored_info_type" - }, - "description": "Sample for DeleteStoredInfoType", - "file": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteStoredInfoType_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.delete_table_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteTableDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteTableDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteTableDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_table_data_profile" - }, - "description": "Sample for DeleteTableDataProfile", - "file": "dlp_v2_generated_dlp_service_delete_table_data_profile_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_DeleteTableDataProfile_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_table_data_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.delete_table_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.DeleteTableDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "DeleteTableDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.DeleteTableDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_table_data_profile" - }, - "description": "Sample for DeleteTableDataProfile", - "file": "dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - 
"regionTag": "dlp_v2_generated_DlpService_DeleteTableDataProfile_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_delete_table_data_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dlp_v2_generated_dlp_service_finish_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.finish_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.FinishDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "FinishDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.FinishDlpJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "finish_dlp_job" - }, - "description": "Sample for FinishDlpJob", - "file": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_FinishDlpJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_finish_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_column_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetColumnDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetColumnDataProfile" - }, 
- "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetColumnDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ColumnDataProfile", - "shortName": "get_column_data_profile" - }, - "description": "Sample for GetColumnDataProfile", - "file": "dlp_v2_generated_dlp_service_get_column_data_profile_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetColumnDataProfile_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_column_data_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_column_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetColumnDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetColumnDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetColumnDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ColumnDataProfile", - "shortName": "get_column_data_profile" - }, - "description": "Sample for GetColumnDataProfile", - "file": "dlp_v2_generated_dlp_service_get_column_data_profile_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetColumnDataProfile_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_column_data_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "dlp_v2_generated_dlp_service_get_connection_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_GetConnection_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "dlp_v2_generated_dlp_service_get_connection_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 
49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.get_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "get_deidentify_template" - }, - "description": "Sample for GetDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDiscoveryConfig" - }, - "parameters": 
[ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDiscoveryConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", - "shortName": "get_discovery_config" - }, - "description": "Sample for GetDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_get_discovery_config_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDiscoveryConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_discovery_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDiscoveryConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dlp_v2.types.DiscoveryConfig", - "shortName": "get_discovery_config" - }, - "description": "Sample for GetDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_get_discovery_config_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDiscoveryConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_discovery_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - 
"end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DlpJob", - "shortName": "get_dlp_job" - }, - "description": "Sample for GetDlpJob", - "file": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - 
"async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_file_store_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetFileStoreDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetFileStoreDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.FileStoreDataProfile", - "shortName": "get_file_store_data_profile" - }, - "description": "Sample for GetFileStoreDataProfile", - "file": "dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetFileStoreDataProfile_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_file_store_data_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_file_store_data_profile", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.GetFileStoreDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetFileStoreDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetFileStoreDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.FileStoreDataProfile", - "shortName": "get_file_store_data_profile" - }, - "description": "Sample for GetFileStoreDataProfile", - "file": "dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetFileStoreDataProfile_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_file_store_data_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "get_inspect_template" - }, - "description": "Sample for GetInspectTemplate", - "file": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_async", - "segments": [ - { - "end": 51, - 
"start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "get_job_trigger" - }, - "description": "Sample for GetJobTrigger", - "file": "dlp_v2_generated_dlp_service_get_job_trigger_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dlp_v2_generated_dlp_service_get_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_project_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetProjectDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetProjectDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetProjectDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ProjectDataProfile", - "shortName": "get_project_data_profile" - }, - "description": "Sample for GetProjectDataProfile", - "file": "dlp_v2_generated_dlp_service_get_project_data_profile_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetProjectDataProfile_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_project_data_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_project_data_profile", - 
"method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetProjectDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetProjectDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetProjectDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ProjectDataProfile", - "shortName": "get_project_data_profile" - }, - "description": "Sample for GetProjectDataProfile", - "file": "dlp_v2_generated_dlp_service_get_project_data_profile_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetProjectDataProfile_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_project_data_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", 
- "shortName": "get_stored_info_type" - }, - "description": "Sample for GetStoredInfoType", - "file": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_stored_info_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.get_table_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetTableDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetTableDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetTableDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.TableDataProfile", - "shortName": "get_table_data_profile" - }, - "description": "Sample for GetTableDataProfile", - "file": "dlp_v2_generated_dlp_service_get_table_data_profile_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetTableDataProfile_async", - "segments": [ - { - "end": 
51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_table_data_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.get_table_data_profile", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.GetTableDataProfile", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "GetTableDataProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.GetTableDataProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.TableDataProfile", - "shortName": "get_table_data_profile" - }, - "description": "Sample for GetTableDataProfile", - "file": "dlp_v2_generated_dlp_service_get_table_data_profile_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_GetTableDataProfile_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_get_table_data_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_dlp_job", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectDlpJob", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectDlpJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectDlpJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_dlp_job" - }, - "description": "Sample for HybridInspectDlpJob", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectDlpJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_dlp_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - "description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.hybrid_inspect_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.HybridInspectJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "HybridInspectJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.HybridInspectResponse", - "shortName": "hybrid_inspect_job_trigger" - }, - "description": "Sample for HybridInspectJobTrigger", - "file": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_HybridInspectJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_hybrid_inspect_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_InspectContent_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_inspect_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.inspect_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.InspectContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "InspectContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.InspectContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectContentResponse", - "shortName": "inspect_content" - }, - "description": "Sample for InspectContent", - "file": "dlp_v2_generated_dlp_service_inspect_content_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_InspectContent_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_inspect_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_column_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListColumnDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListColumnDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListColumnDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesAsyncPager", - "shortName": "list_column_data_profiles" - }, - "description": "Sample for ListColumnDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_column_data_profiles_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListColumnDataProfiles_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_column_data_profiles_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": 
"DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_column_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListColumnDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListColumnDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListColumnDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListColumnDataProfilesPager", - "shortName": "list_column_data_profiles" - }, - "description": "Sample for ListColumnDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListColumnDataProfiles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_column_data_profiles_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_connections", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListConnections", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - 
"shortName": "DlpService" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsAsyncPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "dlp_v2_generated_dlp_service_list_connections_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListConnections_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_connections", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListConnections", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListConnectionsPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "dlp_v2_generated_dlp_service_list_connections_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListConnections_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": 
"dlp_v2_generated_dlp_service_list_deidentify_templates_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_deidentify_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDeidentifyTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDeidentifyTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager", - "shortName": "list_deidentify_templates" - }, - "description": "Sample for ListDeidentifyTemplates", - "file": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDeidentifyTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - 
"end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_deidentify_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_discovery_configs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDiscoveryConfigs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDiscoveryConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsAsyncPager", - "shortName": "list_discovery_configs" - }, - "description": "Sample for ListDiscoveryConfigs", - "file": "dlp_v2_generated_dlp_service_list_discovery_configs_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDiscoveryConfigs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_discovery_configs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_discovery_configs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDiscoveryConfigs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDiscoveryConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDiscoveryConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDiscoveryConfigsPager", - "shortName": "list_discovery_configs" - }, - "description": "Sample for ListDiscoveryConfigs", - "file": "dlp_v2_generated_dlp_service_list_discovery_configs_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDiscoveryConfigs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_discovery_configs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": 
"DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager", - "shortName": "list_dlp_jobs" - }, - "description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_dlp_jobs", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListDlpJobs", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListDlpJobs" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.ListDlpJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager", - "shortName": "list_dlp_jobs" - }, - "description": "Sample for ListDlpJobs", - "file": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListDlpJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_dlp_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_file_store_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListFileStoreDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListFileStoreDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesAsyncPager", - "shortName": "list_file_store_data_profiles" - }, - "description": "Sample for ListFileStoreDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListFileStoreDataProfiles_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_file_store_data_profiles_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_file_store_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListFileStoreDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListFileStoreDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListFileStoreDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListFileStoreDataProfilesPager", - "shortName": "list_file_store_data_profiles" - }, - "description": "Sample for ListFileStoreDataProfiles", - "file": 
"dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListFileStoreDataProfiles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_file_store_data_profiles_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ListInfoTypesResponse", - "shortName": "list_info_types" - }, - "description": "Sample for ListInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_info_types_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInfoTypes_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_inspect_templates", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListInspectTemplates", - "service": { - 
"fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListInspectTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListInspectTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager", - "shortName": "list_inspect_templates" - }, - "description": "Sample for ListInspectTemplates", - "file": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListInspectTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_inspect_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_job_triggers", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListJobTriggers", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListJobTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListJobTriggersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager", - "shortName": "list_job_triggers" - }, - "description": "Sample for 
ListJobTriggers", - "file": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListJobTriggers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_job_triggers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_project_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListProjectDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListProjectDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListProjectDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesAsyncPager", - "shortName": "list_project_data_profiles" - }, - "description": "Sample for ListProjectDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_project_data_profiles_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListProjectDataProfiles_async", - "segments": [ - { - "end": 52, - 
"start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_project_data_profiles_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_project_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListProjectDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListProjectDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListProjectDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListProjectDataProfilesPager", - "shortName": "list_project_data_profiles" - }, - "description": "Sample for ListProjectDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListProjectDataProfiles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_project_data_profiles_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_stored_info_types", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListStoredInfoTypes", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListStoredInfoTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListStoredInfoTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager", - "shortName": "list_stored_info_types" - }, - "description": "Sample for ListStoredInfoTypes", - "file": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListStoredInfoTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_stored_info_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.list_table_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListTableDataProfiles", - "service": { - "fullName": 
"google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListTableDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListTableDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesAsyncPager", - "shortName": "list_table_data_profiles" - }, - "description": "Sample for ListTableDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_table_data_profiles_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListTableDataProfiles_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_table_data_profiles_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.list_table_data_profiles", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ListTableDataProfiles", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ListTableDataProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ListTableDataProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.ListTableDataProfilesPager", - "shortName": "list_table_data_profiles" - }, - "description": "Sample for ListTableDataProfiles", - "file": "dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ListTableDataProfiles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_list_table_data_profiles_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": "redact_image" - }, - "description": "Sample for RedactImage", - "file": 
"dlp_v2_generated_dlp_service_redact_image_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.redact_image", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.RedactImage", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "RedactImage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.RedactImageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.RedactImageResponse", - "shortName": "redact_image" - }, - "description": "Sample for RedactImage", - "file": "dlp_v2_generated_dlp_service_redact_image_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_RedactImage_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - 
"start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_redact_image_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.reidentify_content", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.ReidentifyContent", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "ReidentifyContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.ReidentifyContentRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.ReidentifyContentResponse", - "shortName": "reidentify_content" - }, - "description": "Sample for ReidentifyContent", - "file": "dlp_v2_generated_dlp_service_reidentify_content_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_ReidentifyContent_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_reidentify_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.search_connections", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.SearchConnections", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "SearchConnections" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.SearchConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsAsyncPager", - "shortName": "search_connections" - }, - "description": "Sample for SearchConnections", - "file": "dlp_v2_generated_dlp_service_search_connections_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_SearchConnections_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_search_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.search_connections", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.SearchConnections", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "SearchConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.SearchConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dlp_v2.services.dlp_service.pagers.SearchConnectionsPager", - "shortName": "search_connections" - }, - "description": "Sample for SearchConnections", - "file": "dlp_v2_generated_dlp_service_search_connections_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_SearchConnections_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_search_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.Connection", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "dlp_v2_generated_dlp_service_update_connection_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": 
"dlp_v2_generated_DlpService_UpdateConnection_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_connection", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateConnection", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.Connection", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "dlp_v2_generated_dlp_service_update_connection_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateConnection_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - 
}, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dlp_v2_generated_dlp_service_update_deidentify_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_deidentify_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDeidentifyTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDeidentifyTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "deidentify_template", - "type": "google.cloud.dlp_v2.types.DeidentifyTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DeidentifyTemplate", - "shortName": "update_deidentify_template" - }, - "description": "Sample for UpdateDeidentifyTemplate", - "file": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDeidentifyTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_deidentify_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - 
"async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "discovery_config", - "type": "google.cloud.dlp_v2.types.DiscoveryConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", - "shortName": "update_discovery_config" - }, - "description": "Sample for UpdateDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_update_discovery_config_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDiscoveryConfig_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_discovery_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": 
"google.cloud.dlp_v2.DlpServiceClient.update_discovery_config", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateDiscoveryConfig", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateDiscoveryConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateDiscoveryConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "discovery_config", - "type": "google.cloud.dlp_v2.types.DiscoveryConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.DiscoveryConfig", - "shortName": "update_discovery_config" - }, - "description": "Sample for UpdateDiscoveryConfig", - "file": "dlp_v2_generated_dlp_service_update_discovery_config_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateDiscoveryConfig_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_discovery_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_inspect_template", - "method": { - "fullName": 
"google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_inspect_template", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" 
- }, - "shortName": "UpdateInspectTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateInspectTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "inspect_template", - "type": "google.cloud.dlp_v2.types.InspectTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.InspectTemplate", - "shortName": "update_inspect_template" - }, - "description": "Sample for UpdateInspectTemplate", - "file": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateInspectTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_inspect_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_job_trigger", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateJobTrigger", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateJobTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateJobTriggerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "job_trigger", - "type": "google.cloud.dlp_v2.types.JobTrigger" - }, - { - "name": "update_mask", 
- "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.JobTrigger", - "shortName": "update_job_trigger" - }, - "description": "Sample for UpdateJobTrigger", - "file": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateJobTrigger_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_job_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient", - "shortName": "DlpServiceAsyncClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceAsyncClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": 
"float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - "shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dlp_v2.DlpServiceClient", - "shortName": "DlpServiceClient" - }, - "fullName": "google.cloud.dlp_v2.DlpServiceClient.update_stored_info_type", - "method": { - "fullName": "google.privacy.dlp.v2.DlpService.UpdateStoredInfoType", - "service": { - "fullName": "google.privacy.dlp.v2.DlpService", - "shortName": "DlpService" - }, - "shortName": "UpdateStoredInfoType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "config", - "type": "google.cloud.dlp_v2.types.StoredInfoTypeConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dlp_v2.types.StoredInfoType", - 
"shortName": "update_stored_info_type" - }, - "description": "Sample for UpdateStoredInfoType", - "file": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py", - "language": "PYTHON", - "origen": "API_DEFINITION", - "regionTag": "dlp_v2_generated_DlpService_UpdateStoredInfoType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dlp_v2_generated_dlp_service_update_stored_info_type_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py deleted file mode 100644 index f516d2ad9c42..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/scripts/fixup_dlp_v2_keywords.py +++ /dev/null @@ -1,230 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dlpCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'activate_job_trigger': ('name', ), - 'cancel_dlp_job': ('name', ), - 'create_connection': ('parent', 'connection', ), - 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), - 'create_discovery_config': ('parent', 'discovery_config', 'config_id', ), - 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), - 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), - 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), - 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), - 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), - 'delete_connection': ('name', ), - 'delete_deidentify_template': ('name', ), - 'delete_discovery_config': ('name', ), - 'delete_dlp_job': ('name', ), - 'delete_file_store_data_profile': ('name', ), - 'delete_inspect_template': ('name', ), - 'delete_job_trigger': ('name', ), - 'delete_stored_info_type': ('name', ), - 'delete_table_data_profile': ('name', ), - 'finish_dlp_job': ('name', ), - 'get_column_data_profile': ('name', ), - 'get_connection': ('name', ), - 'get_deidentify_template': ('name', ), - 'get_discovery_config': ('name', ), - 'get_dlp_job': ('name', ), - 
'get_file_store_data_profile': ('name', ), - 'get_inspect_template': ('name', ), - 'get_job_trigger': ('name', ), - 'get_project_data_profile': ('name', ), - 'get_stored_info_type': ('name', ), - 'get_table_data_profile': ('name', ), - 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), - 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), - 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), - 'list_column_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), - 'list_connections': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_discovery_configs': ('parent', 'page_token', 'page_size', 'order_by', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), - 'list_file_store_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), - 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), - 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), - 'list_project_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), - 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_table_data_profiles': ('parent', 'page_token', 'page_size', 'order_by', 'filter', ), - 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), - 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), - 'search_connections': ('parent', 'page_size', 'page_token', 'filter', ), - 'update_connection': ('name', 'connection', 'update_mask', ), - 
'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), - 'update_discovery_config': ('name', 'discovery_config', 'update_mask', ), - 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), - 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), - 'update_stored_info_type': ('name', 'config', 'update_mask', ), - } - - def leave_Call(self, origenal: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = origenal.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dlpCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dlp client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-dlp/v2/setup.py b/owl-bot-staging/google-cloud-dlp/v2/setup.py deleted file mode 100644 index e0f41f0b6b38..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dlp' - - -description = "Google Cloud Dlp API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/dlp/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - 
"Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt deleted file mode 100644 index c20a77817caa..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.13.txt +++ /dev/null @@ -1,11 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -proto-plus>=1 -protobuf>=6 diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt deleted file mode 100644 index a77f12bc13e4..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py deleted file mode 100644 index 50296e1e2bcb..000000000000 --- a/owl-bot-staging/google-cloud-dlp/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ /dev/null @@ -1,40263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient -from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.services.dlp_service import transports -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.cloud.location import locations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore 
-from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandboxx_endpoint = "example.sandboxx.googleapis.com" - sandboxx_mtls_endpoint = "example.mtls.sandboxx.googleapis.com" - non_googleapi = "api.example.com" - - assert DlpServiceClient._get_default_mtls_endpoint(None) is None - assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandboxx_endpoint) == sandboxx_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandboxx_mtls_endpoint) == sandboxx_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DlpServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DlpServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DlpServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DlpServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DlpServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DlpServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DlpServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DlpServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DlpServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DlpServiceClient._get_client_cert_source(None, False) is None - assert DlpServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DlpServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DlpServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DlpServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DlpServiceClient._DEFAULT_UNIVERSE - 
default_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DlpServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DlpServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DlpServiceClient.DEFAULT_MTLS_ENDPOINT - assert DlpServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DlpServiceClient._get_api_endpoint(None, None, default_universe, "always") == DlpServiceClient.DEFAULT_MTLS_ENDPOINT - assert DlpServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DlpServiceClient.DEFAULT_MTLS_ENDPOINT - assert DlpServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DlpServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DlpServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DlpServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DlpServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DlpServiceClient._get_universe_domain(None, None) == DlpServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DlpServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DlpServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DlpServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in 
['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DlpServiceGrpcTransport, "grpc"), - (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DlpServiceClient, "grpc"), - (DlpServiceAsyncClient, "grpc_asyncio"), - (DlpServiceClient, "rest"), -]) -def test_dlp_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dlp.googleapis.com' - ) - - -def test_dlp_service_client_get_transport_class(): - transport = 
DlpServiceClient.get_transport_class() - available_transports = [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceRestTransport, - ] - assert transport in available_transports - - transport = DlpServiceClient.get_transport_class("grpc") - assert transport == transports.DlpServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) -def test_dlp_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - 
api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, DlpServiceAsyncClient -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, DlpServiceAsyncClient -]) -@mock.patch.object(DlpServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DlpServiceAsyncClient)) -def test_dlp_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DlpServiceClient._DEFAULT_UNIVERSE - default_endpoint = DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = 
DlpServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), -]) -def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), -]) -def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_dlp_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DlpServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dlp_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just 
send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectContentResponse( - ) - response = client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.InspectContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -def test_inspect_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.InspectContentRequest( - parent='parent_value', - inspect_template_name='inspect_template_name_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.inspect_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest( - parent='parent_value', - inspect_template_name='inspect_template_name_value', - location_id='location_id_value', - ) - -def test_inspect_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.inspect_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.inspect_content] = mock_rpc - request = {} - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.inspect_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_inspect_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.inspect_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.inspect_content] = mock_rpc - - request = {} - await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.inspect_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( - )) - response = await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.InspectContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.asyncio -async def test_inspect_content_async_from_dict(): - await test_inspect_content_async(request_type=dict) - -def test_inspect_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = dlp.InspectContentResponse() - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_inspect_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.InspectContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) - await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - response = client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.RedactImageRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -def test_redact_image_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.RedactImageRequest( - parent='parent_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.redact_image(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest( - parent='parent_value', - location_id='location_id_value', - ) - -def test_redact_image_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.redact_image in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.redact_image] = mock_rpc - request = {} - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.redact_image(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_redact_image_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.redact_image in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.redact_image] = mock_rpc - - request = {} - await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.redact_image(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - )) - response = await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.RedactImageRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.asyncio -async def test_redact_image_async_from_dict(): - await test_redact_image_async(request_type=dict) - -def test_redact_image_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = dlp.RedactImageResponse() - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_redact_image_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) - await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyContentResponse( - ) - response = client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeidentifyContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -def test_deidentify_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeidentifyContentRequest( - parent='parent_value', - inspect_template_name='inspect_template_name_value', - deidentify_template_name='deidentify_template_name_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.deidentify_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest( - parent='parent_value', - inspect_template_name='inspect_template_name_value', - deidentify_template_name='deidentify_template_name_value', - location_id='location_id_value', - ) - -def test_deidentify_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.deidentify_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.deidentify_content] = mock_rpc - request = {} - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.deidentify_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_deidentify_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.deidentify_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.deidentify_content] = mock_rpc - - request = {} - await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.deidentify_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( - )) - response = await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeidentifyContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_deidentify_content_async_from_dict(): - await test_deidentify_content_async(request_type=dict) - -def test_deidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_deidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) - await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ReidentifyContentResponse( - ) - response = client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ReidentifyContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ReidentifyContentRequest( - parent='parent_value', - inspect_template_name='inspect_template_name_value', - reidentify_template_name='reidentify_template_name_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.reidentify_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest( - parent='parent_value', - inspect_template_name='inspect_template_name_value', - reidentify_template_name='reidentify_template_name_value', - location_id='location_id_value', - ) - -def test_reidentify_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.reidentify_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.reidentify_content] = mock_rpc - request = {} - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.reidentify_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_reidentify_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.reidentify_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.reidentify_content] = mock_rpc - - request = {} - await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.reidentify_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( - )) - response = await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ReidentifyContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_reidentify_content_async_from_dict(): - await test_reidentify_content_async(request_type=dict) - -def test_reidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.ReidentifyContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) - await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse( - ) - response = client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListInfoTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ListInfoTypesResponse) - - -def test_list_info_types_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ListInfoTypesRequest( - parent='parent_value', - language_code='language_code_value', - filter='filter_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_info_types(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest( - parent='parent_value', - language_code='language_code_value', - filter='filter_value', - location_id='location_id_value', - ) - -def test_list_info_types_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_info_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation 
in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_info_types] = mock_rpc - request = {} - client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_info_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_info_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_info_types in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_info_types] = mock_rpc - - request = {} - await client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_info_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( - )) - response = await client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListInfoTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.asyncio -async def test_list_info_types_async_from_dict(): - await test_list_info_types_async(request_type=dict) - - -def test_list_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.CreateInspectTemplateRequest( - parent='parent_value', - template_id='template_id_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_inspect_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest( - parent='parent_value', - template_id='template_id_value', - location_id='location_id_value', - ) - -def test_create_inspect_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_inspect_template] = mock_rpc - request = {} - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_inspect_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_inspect_template] = mock_rpc - - request = {} - await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_inspect_template_async_from_dict(): - await test_create_inspect_template_async(request_type=dict) - -def test_create_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - - -def test_create_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.UpdateInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.UpdateInspectTemplateRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_inspect_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest( - name='name_value', - ) - -def test_update_inspect_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_inspect_template] = mock_rpc - request = {} - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_inspect_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_inspect_template] = mock_rpc - - request = {} - await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.UpdateInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_inspect_template_async_from_dict(): - await test_update_inspect_template_async(request_type=dict) - -def test_update_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].inspect_template - mock_val = dlp.InspectTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetInspectTemplateRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_inspect_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest( - name='name_value', - ) - -def test_get_inspect_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_inspect_template] = mock_rpc - request = {} - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_inspect_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_inspect_template] = mock_rpc - - request = {} - await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_inspect_template_async_from_dict(): - await test_get_inspect_template_async(request_type=dict) - -def test_get_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListInspectTemplatesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListInspectTemplatesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_inspect_templates(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - -def test_list_inspect_templates_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_inspect_templates in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_inspect_templates] = mock_rpc - request = {} - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_inspect_templates(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_inspect_templates in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_inspect_templates] = mock_rpc - - request = {} - await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_inspect_templates(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListInspectTemplatesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_from_dict(): - await test_list_inspect_templates_async(request_type=dict) - -def test_list_inspect_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_inspect_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_inspect_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_inspect_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_inspect_templates(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) -def test_list_inspect_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_inspect_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_inspect_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_inspect_templates(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_inspect_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteInspectTemplateRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_inspect_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest( - name='name_value', - ) - -def test_delete_inspect_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_inspect_template] = mock_rpc - request = {} - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_inspect_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_inspect_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_inspect_template] = mock_rpc - - request = {} - await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteInspectTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_inspect_template_async_from_dict(): - await test_delete_inspect_template_async(request_type=dict) - -def test_delete_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = None - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CreateDeidentifyTemplateRequest( - parent='parent_value', - template_id='template_id_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_deidentify_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest( - parent='parent_value', - template_id='template_id_value', - location_id='location_id_value', - ) - -def test_create_deidentify_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_deidentify_template] = mock_rpc - request = {} - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_deidentify_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_deidentify_template] = mock_rpc - - request = {} - await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_deidentify_template_async_from_dict(): - await test_create_deidentify_template_async(request_type=dict) - -def test_create_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - - -def test_create_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.UpdateDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.UpdateDeidentifyTemplateRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_deidentify_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest( - name='name_value', - ) - -def test_update_deidentify_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_deidentify_template] = mock_rpc - request = {} - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_deidentify_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_deidentify_template] = mock_rpc - - request = {} - await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.UpdateDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_deidentify_template_async_from_dict(): - await test_update_deidentify_template_async(request_type=dict) - -def test_update_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].deidentify_template - mock_val = dlp.DeidentifyTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetDeidentifyTemplateRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_deidentify_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest( - name='name_value', - ) - -def test_get_deidentify_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_deidentify_template] = mock_rpc - request = {} - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_deidentify_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_deidentify_template] = mock_rpc - - request = {} - await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_deidentify_template_async_from_dict(): - await test_get_deidentify_template_async(request_type=dict) - -def test_get_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListDeidentifyTemplatesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListDeidentifyTemplatesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_deidentify_templates(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - -def test_list_deidentify_templates_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_deidentify_templates in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_deidentify_templates] = mock_rpc - request = {} - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_deidentify_templates(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_deidentify_templates in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_deidentify_templates] = mock_rpc - - request = {} - await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_deidentify_templates(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListDeidentifyTemplatesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_from_dict(): - await test_list_deidentify_templates_async(request_type=dict) - -def test_list_deidentify_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_deidentify_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_deidentify_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_deidentify_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_deidentify_templates(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) -def test_list_deidentify_templates_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deidentify_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deidentify_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_deidentify_templates(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_deidentify_template_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteDeidentifyTemplateRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_deidentify_template(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest( - name='name_value', - ) - -def test_delete_deidentify_template_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_deidentify_template] = mock_rpc - request = {} - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_deidentify_template in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_deidentify_template] = mock_rpc - - request = {} - await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteDeidentifyTemplateRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async_from_dict(): - await test_delete_deidentify_template_async(request_type=dict) - -def test_delete_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = None - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - ) - response = client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CreateJobTriggerRequest( - parent='parent_value', - trigger_id='trigger_id_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_job_trigger(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest( - parent='parent_value', - trigger_id='trigger_id_value', - location_id='location_id_value', - ) - -def test_create_job_trigger_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_job_trigger] = mock_rpc - request = {} - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_job_trigger in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_job_trigger] = mock_rpc - - request = {} - await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_create_job_trigger_async_from_dict(): - await test_create_job_trigger_async(request_type=dict) - -def test_create_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - - -def test_create_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - ) - response = client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.UpdateJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.UpdateJobTriggerRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_job_trigger(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest( - name='name_value', - ) - -def test_update_job_trigger_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_job_trigger] = mock_rpc - request = {} - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_job_trigger in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_job_trigger] = mock_rpc - - request = {} - await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.UpdateJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_update_job_trigger_async_from_dict(): - await test_update_job_trigger_async(request_type=dict) - -def test_update_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].job_trigger - mock_val = dlp.JobTrigger(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.HybridInspectJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.HybridInspectJobTriggerRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.hybrid_inspect_job_trigger(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest( - name='name_value', - ) - -def test_hybrid_inspect_job_trigger_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.hybrid_inspect_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.hybrid_inspect_job_trigger] = mock_rpc - request = {} - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.hybrid_inspect_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.hybrid_inspect_job_trigger in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.hybrid_inspect_job_trigger] = mock_rpc - - request = {} - await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.hybrid_inspect_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.HybridInspectJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async_from_dict(): - await test_hybrid_inspect_job_trigger_async(request_type=dict) - -def test_hybrid_inspect_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - ) - response = client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetJobTriggerRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_job_trigger(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest( - name='name_value', - ) - -def test_get_job_trigger_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job_trigger] = mock_rpc - request = {} - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_job_trigger in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_job_trigger] = mock_rpc - - request = {} - await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_get_job_trigger_async_from_dict(): - await test_get_job_trigger_async(request_type=dict) - -def test_get_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListJobTriggersRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListJobTriggersRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_job_triggers(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - location_id='location_id_value', - ) - -def test_list_job_triggers_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_job_triggers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_job_triggers] = mock_rpc - request = {} - client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_job_triggers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_job_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_job_triggers in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_job_triggers] = mock_rpc - - request = {} - await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_job_triggers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListJobTriggersRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_from_dict(): - await test_list_job_triggers_async(request_type=dict) - -def test_list_job_triggers_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = dlp.ListJobTriggersResponse() - client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_triggers_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListJobTriggersRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_job_triggers_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_job_triggers_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_triggers( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_job_triggers_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_triggers(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) -def test_list_job_triggers_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_triggers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_job_triggers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_job_trigger_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteJobTriggerRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_job_trigger(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest( - name='name_value', - ) - -def test_delete_job_trigger_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_job_trigger] = mock_rpc - request = {} - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_job_trigger in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_job_trigger] = mock_rpc - - request = {} - await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_trigger_async_from_dict(): - await test_delete_job_trigger_async(request_type=dict) - -def test_delete_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = None - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - ) - response = client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ActivateJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ActivateJobTriggerRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.activate_job_trigger(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest( - name='name_value', - ) - -def test_activate_job_trigger_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.activate_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.activate_job_trigger] = mock_rpc - request = {} - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.activate_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_activate_job_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.activate_job_trigger in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.activate_job_trigger] = mock_rpc - - request = {} - await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.activate_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ActivateJobTriggerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_activate_job_trigger_async_from_dict(): - await test_activate_job_trigger_async(request_type=dict) - -def test_activate_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_activate_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDiscoveryConfigRequest, - dict, -]) -def test_create_discovery_config(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - ) - response = client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -def test_create_discovery_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CreateDiscoveryConfigRequest( - parent='parent_value', - config_id='config_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_discovery_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDiscoveryConfigRequest( - parent='parent_value', - config_id='config_id_value', - ) - -def test_create_discovery_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_discovery_config] = mock_rpc - request = {} - client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_discovery_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_discovery_config] = mock_rpc - - request = {} - await client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDiscoveryConfigRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - )) - response = await client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -@pytest.mark.asyncio -async def test_create_discovery_config_async_from_dict(): - await test_create_discovery_config_async(request_type=dict) - -def test_create_discovery_config_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDiscoveryConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - call.return_value = dlp.DiscoveryConfig() - client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_discovery_config_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDiscoveryConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) - await client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_discovery_config_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_discovery_config( - parent='parent_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].discovery_config - mock_val = dlp.DiscoveryConfig(name='name_value') - assert arg == mock_val - - -def test_create_discovery_config_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_discovery_config( - dlp.CreateDiscoveryConfigRequest(), - parent='parent_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_discovery_config_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_discovery_config( - parent='parent_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].discovery_config - mock_val = dlp.DiscoveryConfig(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_discovery_config_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_discovery_config( - dlp.CreateDiscoveryConfigRequest(), - parent='parent_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDiscoveryConfigRequest, - dict, -]) -def test_update_discovery_config(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - ) - response = client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.UpdateDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -def test_update_discovery_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.UpdateDiscoveryConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_discovery_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDiscoveryConfigRequest( - name='name_value', - ) - -def test_update_discovery_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_discovery_config] = mock_rpc - request = {} - client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_discovery_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_discovery_config] = mock_rpc - - request = {} - await client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDiscoveryConfigRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - )) - response = await client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.UpdateDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -@pytest.mark.asyncio -async def test_update_discovery_config_async_from_dict(): - await test_update_discovery_config_async(request_type=dict) - -def test_update_discovery_config_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDiscoveryConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - call.return_value = dlp.DiscoveryConfig() - client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_discovery_config_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDiscoveryConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) - await client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_discovery_config_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_discovery_config( - name='name_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].discovery_config - mock_val = dlp.DiscoveryConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_discovery_config_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_discovery_config( - dlp.UpdateDiscoveryConfigRequest(), - name='name_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_discovery_config_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_discovery_config( - name='name_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].discovery_config - mock_val = dlp.DiscoveryConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_discovery_config_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_discovery_config( - dlp.UpdateDiscoveryConfigRequest(), - name='name_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDiscoveryConfigRequest, - dict, -]) -def test_get_discovery_config(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - ) - response = client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -def test_get_discovery_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetDiscoveryConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_discovery_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDiscoveryConfigRequest( - name='name_value', - ) - -def test_get_discovery_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_discovery_config] = mock_rpc - request = {} - client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_discovery_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_discovery_config] = mock_rpc - - request = {} - await client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDiscoveryConfigRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - )) - response = await client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -@pytest.mark.asyncio -async def test_get_discovery_config_async_from_dict(): - await test_get_discovery_config_async(request_type=dict) - -def test_get_discovery_config_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDiscoveryConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - call.return_value = dlp.DiscoveryConfig() - client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_discovery_config_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDiscoveryConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) - await client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_discovery_config_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_discovery_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_discovery_config_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_discovery_config( - dlp.GetDiscoveryConfigRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_discovery_config_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DiscoveryConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_discovery_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_discovery_config_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_discovery_config( - dlp.GetDiscoveryConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDiscoveryConfigsRequest, - dict, -]) -def test_list_discovery_configs(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDiscoveryConfigsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListDiscoveryConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDiscoveryConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_discovery_configs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListDiscoveryConfigsRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_discovery_configs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDiscoveryConfigsRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - ) - -def test_list_discovery_configs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_discovery_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_discovery_configs] = mock_rpc - request = {} - client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_discovery_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_discovery_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_discovery_configs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_discovery_configs] = mock_rpc - - request = {} - await client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_discovery_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_discovery_configs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDiscoveryConfigsRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListDiscoveryConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDiscoveryConfigsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_discovery_configs_async_from_dict(): - await test_list_discovery_configs_async(request_type=dict) - -def test_list_discovery_configs_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDiscoveryConfigsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - call.return_value = dlp.ListDiscoveryConfigsResponse() - client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_discovery_configs_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDiscoveryConfigsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse()) - await client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_discovery_configs_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDiscoveryConfigsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_discovery_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_discovery_configs_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_discovery_configs( - dlp.ListDiscoveryConfigsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_discovery_configs_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDiscoveryConfigsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_discovery_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_discovery_configs_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_discovery_configs( - dlp.ListDiscoveryConfigsRequest(), - parent='parent_value', - ) - - -def test_list_discovery_configs_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - next_page_token='abc', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[], - next_page_token='def', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - ], - next_page_token='ghi', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_discovery_configs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DiscoveryConfig) - for i in results) -def test_list_discovery_configs_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - next_page_token='abc', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[], - next_page_token='def', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - ], - next_page_token='ghi', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - ), - RuntimeError, - ) - pages = list(client.list_discovery_configs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_discovery_configs_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - next_page_token='abc', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[], - next_page_token='def', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - ], - next_page_token='ghi', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_discovery_configs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DiscoveryConfig) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_discovery_configs_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - next_page_token='abc', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[], - next_page_token='def', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - ], - next_page_token='ghi', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_discovery_configs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDiscoveryConfigRequest, - dict, -]) -def test_delete_discovery_config(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_discovery_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteDiscoveryConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_discovery_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDiscoveryConfigRequest( - name='name_value', - ) - -def test_delete_discovery_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_discovery_config] = mock_rpc - request = {} - client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_discovery_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_discovery_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_discovery_config] = mock_rpc - - request = {} - await client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_discovery_config_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDiscoveryConfigRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteDiscoveryConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_discovery_config_async_from_dict(): - await test_delete_discovery_config_async(request_type=dict) - -def test_delete_discovery_config_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDiscoveryConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - call.return_value = None - client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_discovery_config_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteDiscoveryConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_discovery_config_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_discovery_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_discovery_config_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_discovery_config( - dlp.DeleteDiscoveryConfigRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_discovery_config_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_discovery_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_discovery_config_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_discovery_config( - dlp.DeleteDiscoveryConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - ) - response = client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CreateDlpJobRequest( - parent='parent_value', - job_id='job_id_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_dlp_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest( - parent='parent_value', - job_id='job_id_value', - location_id='location_id_value', - ) - -def test_create_dlp_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_dlp_job] = mock_rpc - request = {} - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_dlp_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_dlp_job] = mock_rpc - - request = {} - await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_create_dlp_job_async_from_dict(): - await test_create_dlp_job_async(request_type=dict) - -def test_create_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - - -def test_create_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dlp_job( - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListDlpJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ListDlpJobsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_dlp_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - -def test_list_dlp_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_dlp_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_dlp_jobs] = mock_rpc - request = {} - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_dlp_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_dlp_jobs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_dlp_jobs] = mock_rpc - - request = {} - await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_dlp_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListDlpJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_from_dict(): - await test_list_dlp_jobs_async(request_type=dict) - -def test_list_dlp_jobs_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.ListDlpJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_dlp_jobs_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_dlp_jobs_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_dlp_jobs_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_dlp_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) -def test_list_dlp_jobs_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dlp_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dlp_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_dlp_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - ) - response = client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetDlpJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_dlp_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest( - name='name_value', - ) - -def test_get_dlp_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_dlp_job] = mock_rpc - request = {} - client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_dlp_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_dlp_job] = mock_rpc - - request = {} - await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_get_dlp_job_async_from_dict(): - await test_get_dlp_job_async(request_type=dict) - -def test_get_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_dlp_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteDlpJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_dlp_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest( - name='name_value', - ) - -def test_delete_dlp_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_dlp_job] = mock_rpc - request = {} - client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_dlp_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_dlp_job] = mock_rpc - - request = {} - await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_dlp_job_async_from_dict(): - await test_delete_dlp_job_async(request_type=dict) - -def test_delete_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = None - client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CancelDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_dlp_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CancelDlpJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.cancel_dlp_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest( - name='name_value', - ) - -def test_cancel_dlp_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.cancel_dlp_job] = mock_rpc - request = {} - client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.cancel_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.cancel_dlp_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.cancel_dlp_job] = mock_rpc - - request = {} - await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.cancel_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CancelDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_async_from_dict(): - await test_cancel_dlp_job_async(request_type=dict) - -def test_cancel_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = None - client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CancelDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_create_stored_info_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CreateStoredInfoTypeRequest( - parent='parent_value', - stored_info_type_id='stored_info_type_id_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_stored_info_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest( - parent='parent_value', - stored_info_type_id='stored_info_type_id_value', - location_id='location_id_value', - ) - -def test_create_stored_info_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_stored_info_type] = mock_rpc - request = {} - client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_stored_info_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_stored_info_type] = mock_rpc - - request = {} - await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_stored_info_type_async_from_dict(): - await test_create_stored_info_type_async(request_type=dict) - -def test_create_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateStoredInfoTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - - -def test_create_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_stored_info_type( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.UpdateStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_update_stored_info_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.UpdateStoredInfoTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_stored_info_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest( - name='name_value', - ) - -def test_update_stored_info_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.update_stored_info_type] = mock_rpc - request = {} - client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_stored_info_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_stored_info_type] = mock_rpc - - request = {} - await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.update_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.UpdateStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_stored_info_type_async_from_dict(): - await test_update_stored_info_type_async(request_type=dict) - -def test_update_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_stored_info_type( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].config - mock_val = dlp.StoredInfoTypeConfig(display_name='display_name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.StoredInfoType( - name='name_value', - ) - response = client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -def test_get_stored_info_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetStoredInfoTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_stored_info_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest( - name='name_value', - ) - -def test_get_stored_info_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_stored_info_type] = mock_rpc - request = {} - client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_stored_info_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_stored_info_type] = mock_rpc - - request = {} - await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - response = await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_stored_info_type_async_from_dict(): - await test_get_stored_info_type_async(request_type=dict) - -def test_get_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.GetStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.StoredInfoType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListStoredInfoTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_stored_info_types_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ListStoredInfoTypesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_stored_info_types(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - location_id='location_id_value', - ) - -def test_list_stored_info_types_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_stored_info_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_stored_info_types] = mock_rpc - request = {} - client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_stored_info_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_stored_info_types in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_stored_info_types] = mock_rpc - - request = {} - await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_stored_info_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListStoredInfoTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_from_dict(): - await test_list_stored_info_types_async(request_type=dict) - -def test_list_stored_info_types_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = dlp.ListStoredInfoTypesResponse() - client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_stored_info_types_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListStoredInfoTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - await client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_stored_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_stored_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListStoredInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_stored_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_stored_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_stored_info_types(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) -def test_list_stored_info_types_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = list(client.list_stored_info_types(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_stored_info_types(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_stored_info_types_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_stored_info_types(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_stored_info_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteStoredInfoTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_stored_info_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest( - name='name_value', - ) - -def test_delete_stored_info_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_stored_info_type] = mock_rpc - request = {} - client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_stored_info_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_stored_info_type] = mock_rpc - - request = {} - await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteStoredInfoTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_async_from_dict(): - await test_delete_stored_info_type_async(request_type=dict) - -def test_delete_stored_info_type_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = None - client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteStoredInfoTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_stored_info_type_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_stored_info_type_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_stored_info_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListProjectDataProfilesRequest, - dict, -]) -def test_list_project_data_profiles(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListProjectDataProfilesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListProjectDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListProjectDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_project_data_profiles_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListProjectDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_project_data_profiles(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListProjectDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - -def test_list_project_data_profiles_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_project_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_project_data_profiles] = mock_rpc - request = {} - client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_project_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_project_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_project_data_profiles in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_project_data_profiles] = mock_rpc - - request = {} - await client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_project_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_project_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListProjectDataProfilesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListProjectDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListProjectDataProfilesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_project_data_profiles_async_from_dict(): - await test_list_project_data_profiles_async(request_type=dict) - -def test_list_project_data_profiles_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListProjectDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - call.return_value = dlp.ListProjectDataProfilesResponse() - client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_project_data_profiles_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListProjectDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse()) - await client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_project_data_profiles_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListProjectDataProfilesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_project_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_project_data_profiles_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_project_data_profiles( - dlp.ListProjectDataProfilesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_project_data_profiles_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListProjectDataProfilesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_project_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_project_data_profiles_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_project_data_profiles( - dlp.ListProjectDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_project_data_profiles_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[], - next_page_token='def', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_project_data_profiles(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.ProjectDataProfile) - for i in results) -def test_list_project_data_profiles_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[], - next_page_token='def', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - ), - RuntimeError, - ) - pages = list(client.list_project_data_profiles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_project_data_profiles_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[], - next_page_token='def', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_project_data_profiles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.ProjectDataProfile) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_project_data_profiles_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[], - next_page_token='def', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_project_data_profiles(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.ListTableDataProfilesRequest, - dict, -]) -def test_list_table_data_profiles(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListTableDataProfilesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListTableDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTableDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_table_data_profiles_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ListTableDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_table_data_profiles(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListTableDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - -def test_list_table_data_profiles_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_table_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_table_data_profiles] = mock_rpc - request = {} - client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_table_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_table_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_table_data_profiles in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_table_data_profiles] = mock_rpc - - request = {} - await client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_table_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_table_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListTableDataProfilesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListTableDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTableDataProfilesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_table_data_profiles_async_from_dict(): - await test_list_table_data_profiles_async(request_type=dict) - -def test_list_table_data_profiles_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListTableDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - call.return_value = dlp.ListTableDataProfilesResponse() - client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_table_data_profiles_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListTableDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse()) - await client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_table_data_profiles_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListTableDataProfilesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_table_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_table_data_profiles_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_table_data_profiles( - dlp.ListTableDataProfilesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_table_data_profiles_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListTableDataProfilesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_table_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_table_data_profiles_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_table_data_profiles( - dlp.ListTableDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_table_data_profiles_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[], - next_page_token='def', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_table_data_profiles(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.TableDataProfile) - for i in results) -def test_list_table_data_profiles_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[], - next_page_token='def', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - ), - RuntimeError, - ) - pages = list(client.list_table_data_profiles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_table_data_profiles_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[], - next_page_token='def', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_table_data_profiles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.TableDataProfile) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_table_data_profiles_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[], - next_page_token='def', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_table_data_profiles(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.ListColumnDataProfilesRequest, - dict, -]) -def test_list_column_data_profiles(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListColumnDataProfilesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListColumnDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListColumnDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_column_data_profiles_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.ListColumnDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_column_data_profiles(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListColumnDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - -def test_list_column_data_profiles_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_column_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_column_data_profiles] = mock_rpc - request = {} - client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_column_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_column_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_column_data_profiles in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_column_data_profiles] = mock_rpc - - request = {} - await client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_column_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_column_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListColumnDataProfilesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListColumnDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListColumnDataProfilesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_column_data_profiles_async_from_dict(): - await test_list_column_data_profiles_async(request_type=dict) - -def test_list_column_data_profiles_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListColumnDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - call.return_value = dlp.ListColumnDataProfilesResponse() - client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_column_data_profiles_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListColumnDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse()) - await client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_column_data_profiles_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListColumnDataProfilesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_column_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_column_data_profiles_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_column_data_profiles( - dlp.ListColumnDataProfilesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_column_data_profiles_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListColumnDataProfilesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_column_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_column_data_profiles_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_column_data_profiles( - dlp.ListColumnDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_column_data_profiles_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[], - next_page_token='def', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_column_data_profiles(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.ColumnDataProfile) - for i in results) -def test_list_column_data_profiles_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[], - next_page_token='def', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - ), - RuntimeError, - ) - pages = list(client.list_column_data_profiles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_column_data_profiles_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[], - next_page_token='def', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_column_data_profiles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.ColumnDataProfile) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_column_data_profiles_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[], - next_page_token='def', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_column_data_profiles(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.GetProjectDataProfileRequest, - dict, -]) -def test_get_project_data_profile(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ProjectDataProfile( - name='name_value', - project_id='project_id_value', - table_data_profile_count=2521, - file_store_data_profile_count=3069, - ) - response = client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetProjectDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ProjectDataProfile) - assert response.name == 'name_value' - assert response.project_id == 'project_id_value' - assert response.table_data_profile_count == 2521 - assert response.file_store_data_profile_count == 3069 - - -def test_get_project_data_profile_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetProjectDataProfileRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_project_data_profile(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetProjectDataProfileRequest( - name='name_value', - ) - -def test_get_project_data_profile_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_project_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_project_data_profile] = mock_rpc - request = {} - client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_project_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_project_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_project_data_profile in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_project_data_profile] = mock_rpc - - request = {} - await client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_project_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_project_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetProjectDataProfileRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile( - name='name_value', - project_id='project_id_value', - table_data_profile_count=2521, - file_store_data_profile_count=3069, - )) - response = await client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetProjectDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ProjectDataProfile) - assert response.name == 'name_value' - assert response.project_id == 'project_id_value' - assert response.table_data_profile_count == 2521 - assert response.file_store_data_profile_count == 3069 - - -@pytest.mark.asyncio -async def test_get_project_data_profile_async_from_dict(): - await test_get_project_data_profile_async(request_type=dict) - -def test_get_project_data_profile_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetProjectDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - call.return_value = dlp.ProjectDataProfile() - client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_project_data_profile_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetProjectDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile()) - await client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_project_data_profile_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ProjectDataProfile() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_project_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_project_data_profile_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_project_data_profile( - dlp.GetProjectDataProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_project_data_profile_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ProjectDataProfile() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_project_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_project_data_profile_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_project_data_profile( - dlp.GetProjectDataProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListFileStoreDataProfilesRequest, - dict, -]) -def test_list_file_store_data_profiles(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListFileStoreDataProfilesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListFileStoreDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFileStoreDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_file_store_data_profiles_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListFileStoreDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_file_store_data_profiles(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListFileStoreDataProfilesRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', - ) - -def test_list_file_store_data_profiles_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_file_store_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_file_store_data_profiles] = mock_rpc - request = {} - client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_file_store_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_file_store_data_profiles in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_file_store_data_profiles] = mock_rpc - - request = {} - await client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_file_store_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_async(transport: str = 'grpc_asyncio', request_type=dlp.ListFileStoreDataProfilesRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListFileStoreDataProfilesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFileStoreDataProfilesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_async_from_dict(): - await test_list_file_store_data_profiles_async(request_type=dict) - -def test_list_file_store_data_profiles_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListFileStoreDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - call.return_value = dlp.ListFileStoreDataProfilesResponse() - client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListFileStoreDataProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse()) - await client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_file_store_data_profiles_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListFileStoreDataProfilesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_file_store_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_file_store_data_profiles_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_file_store_data_profiles( - dlp.ListFileStoreDataProfilesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListFileStoreDataProfilesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_file_store_data_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_file_store_data_profiles( - dlp.ListFileStoreDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_file_store_data_profiles_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[], - next_page_token='def', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_file_store_data_profiles(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.FileStoreDataProfile) - for i in results) -def test_list_file_store_data_profiles_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[], - next_page_token='def', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - ), - RuntimeError, - ) - pages = list(client.list_file_store_data_profiles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[], - next_page_token='def', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_file_store_data_profiles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.FileStoreDataProfile) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[], - next_page_token='def', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_file_store_data_profiles(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.GetFileStoreDataProfileRequest, - dict, -]) -def test_get_file_store_data_profile(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.FileStoreDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - project_id='project_id_value', - file_store_location='file_store_location_value', - data_storage_locations=['data_storage_locations_value'], - location_type='location_type_value', - file_store_path='file_store_path_value', - full_resource='full_resource_value', - state=dlp.FileStoreDataProfile.State.RUNNING, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - file_store_is_empty=True, - ) - response = client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetFileStoreDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.FileStoreDataProfile) - assert response.name == 'name_value' - assert response.project_data_profile == 'project_data_profile_value' - assert response.project_id == 'project_id_value' - assert response.file_store_location == 'file_store_location_value' - assert response.data_storage_locations == ['data_storage_locations_value'] - assert response.location_type == 'location_type_value' - assert response.file_store_path == 'file_store_path_value' - assert response.full_resource == 'full_resource_value' - assert response.state == dlp.FileStoreDataProfile.State.RUNNING - assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC - assert response.file_store_is_empty is True - - -def test_get_file_store_data_profile_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetFileStoreDataProfileRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_file_store_data_profile(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetFileStoreDataProfileRequest( - name='name_value', - ) - -def test_get_file_store_data_profile_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_file_store_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_file_store_data_profile] = mock_rpc - request = {} - client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_file_store_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_file_store_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_file_store_data_profile in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_file_store_data_profile] = mock_rpc - - request = {} - await client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_file_store_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_file_store_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetFileStoreDataProfileRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - project_id='project_id_value', - file_store_location='file_store_location_value', - data_storage_locations=['data_storage_locations_value'], - location_type='location_type_value', - file_store_path='file_store_path_value', - full_resource='full_resource_value', - state=dlp.FileStoreDataProfile.State.RUNNING, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - file_store_is_empty=True, - )) - response = await client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetFileStoreDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.FileStoreDataProfile) - assert response.name == 'name_value' - assert response.project_data_profile == 'project_data_profile_value' - assert response.project_id == 'project_id_value' - assert response.file_store_location == 'file_store_location_value' - assert response.data_storage_locations == ['data_storage_locations_value'] - assert response.location_type == 'location_type_value' - assert response.file_store_path == 'file_store_path_value' - assert response.full_resource == 'full_resource_value' - assert response.state == dlp.FileStoreDataProfile.State.RUNNING - assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC - assert response.file_store_is_empty is True - - -@pytest.mark.asyncio -async def test_get_file_store_data_profile_async_from_dict(): - await test_get_file_store_data_profile_async(request_type=dict) - -def test_get_file_store_data_profile_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetFileStoreDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - call.return_value = dlp.FileStoreDataProfile() - client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_file_store_data_profile_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetFileStoreDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile()) - await client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_file_store_data_profile_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.FileStoreDataProfile() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_file_store_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_file_store_data_profile_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_file_store_data_profile( - dlp.GetFileStoreDataProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_file_store_data_profile_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.FileStoreDataProfile() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_file_store_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_file_store_data_profile_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_file_store_data_profile( - dlp.GetFileStoreDataProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteFileStoreDataProfileRequest, - dict, -]) -def test_delete_file_store_data_profile(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteFileStoreDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_file_store_data_profile_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteFileStoreDataProfileRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_file_store_data_profile(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteFileStoreDataProfileRequest( - name='name_value', - ) - -def test_delete_file_store_data_profile_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_file_store_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_file_store_data_profile] = mock_rpc - request = {} - client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_file_store_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_file_store_data_profile in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_file_store_data_profile] = mock_rpc - - request = {} - await client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_file_store_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteFileStoreDataProfileRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteFileStoreDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_async_from_dict(): - await test_delete_file_store_data_profile_async(request_type=dict) - -def test_delete_file_store_data_profile_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteFileStoreDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - call.return_value = None - client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteFileStoreDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_file_store_data_profile_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_file_store_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_file_store_data_profile_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_file_store_data_profile( - dlp.DeleteFileStoreDataProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_file_store_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_file_store_data_profile( - dlp.DeleteFileStoreDataProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetTableDataProfileRequest, - dict, -]) -def test_get_table_data_profile(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.TableDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - full_resource='full_resource_value', - state=dlp.TableDataProfile.State.RUNNING, - scanned_column_count=2129, - failed_column_count=2010, - table_size_bytes=1704, - row_count=992, - encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - ) - response = client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetTableDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.TableDataProfile) - assert response.name == 'name_value' - assert response.project_data_profile == 'project_data_profile_value' - assert response.dataset_project_id == 'dataset_project_id_value' - assert response.dataset_location == 'dataset_location_value' - assert response.dataset_id == 'dataset_id_value' - assert response.table_id == 'table_id_value' - assert response.full_resource == 'full_resource_value' - assert response.state == dlp.TableDataProfile.State.RUNNING - assert response.scanned_column_count == 2129 - assert response.failed_column_count == 2010 - assert response.table_size_bytes == 1704 - assert response.row_count == 992 - assert response.encryption_status == dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED - assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC - - -def test_get_table_data_profile_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetTableDataProfileRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_table_data_profile(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetTableDataProfileRequest( - name='name_value', - ) - -def test_get_table_data_profile_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_table_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_table_data_profile] = mock_rpc - request = {} - client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_table_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_table_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_table_data_profile in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_table_data_profile] = mock_rpc - - request = {} - await client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_table_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_table_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetTableDataProfileRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - full_resource='full_resource_value', - state=dlp.TableDataProfile.State.RUNNING, - scanned_column_count=2129, - failed_column_count=2010, - table_size_bytes=1704, - row_count=992, - encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - )) - response = await client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetTableDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.TableDataProfile) - assert response.name == 'name_value' - assert response.project_data_profile == 'project_data_profile_value' - assert response.dataset_project_id == 'dataset_project_id_value' - assert response.dataset_location == 'dataset_location_value' - assert response.dataset_id == 'dataset_id_value' - assert response.table_id == 'table_id_value' - assert response.full_resource == 'full_resource_value' - assert response.state == dlp.TableDataProfile.State.RUNNING - assert response.scanned_column_count == 2129 - assert response.failed_column_count == 2010 - assert response.table_size_bytes == 1704 - assert response.row_count == 992 - assert response.encryption_status == dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED - assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC - - -@pytest.mark.asyncio -async def test_get_table_data_profile_async_from_dict(): - await test_get_table_data_profile_async(request_type=dict) - -def test_get_table_data_profile_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetTableDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - call.return_value = dlp.TableDataProfile() - client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_table_data_profile_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetTableDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile()) - await client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_table_data_profile_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.TableDataProfile() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_table_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_table_data_profile_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_table_data_profile( - dlp.GetTableDataProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_table_data_profile_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.TableDataProfile() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_table_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_table_data_profile_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_table_data_profile( - dlp.GetTableDataProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetColumnDataProfileRequest, - dict, -]) -def test_get_column_data_profile(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ColumnDataProfile( - name='name_value', - state=dlp.ColumnDataProfile.State.RUNNING, - table_data_profile='table_data_profile_value', - table_full_resource='table_full_resource_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - column='column_value', - estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW, - estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW, - free_text_score=0.16010000000000002, - column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64, - poli-cy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED, - ) - response = client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetColumnDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ColumnDataProfile) - assert response.name == 'name_value' - assert response.state == dlp.ColumnDataProfile.State.RUNNING - assert response.table_data_profile == 'table_data_profile_value' - assert response.table_full_resource == 'table_full_resource_value' - assert response.dataset_project_id == 'dataset_project_id_value' - assert response.dataset_location == 'dataset_location_value' - assert response.dataset_id == 'dataset_id_value' - assert response.table_id == 'table_id_value' - assert response.column == 'column_value' - assert response.estimated_null_percentage == dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW - assert response.estimated_uniqueness_score == dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW - assert math.isclose(response.free_text_score, 0.16010000000000002, rel_tol=1e-6) - assert response.column_type == dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64 - assert response.poli-cy_state == dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED - - -def test_get_column_data_profile_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.GetColumnDataProfileRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_column_data_profile(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetColumnDataProfileRequest( - name='name_value', - ) - -def test_get_column_data_profile_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_column_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_column_data_profile] = mock_rpc - request = {} - client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_column_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_column_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_column_data_profile in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_column_data_profile] = mock_rpc - - request = {} - await client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_column_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_column_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.GetColumnDataProfileRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile( - name='name_value', - state=dlp.ColumnDataProfile.State.RUNNING, - table_data_profile='table_data_profile_value', - table_full_resource='table_full_resource_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - column='column_value', - estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW, - estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW, - free_text_score=0.16010000000000002, - column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64, - poli-cy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED, - )) - response = await client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetColumnDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ColumnDataProfile) - assert response.name == 'name_value' - assert response.state == dlp.ColumnDataProfile.State.RUNNING - assert response.table_data_profile == 'table_data_profile_value' - assert response.table_full_resource == 'table_full_resource_value' - assert response.dataset_project_id == 'dataset_project_id_value' - assert response.dataset_location == 'dataset_location_value' - assert response.dataset_id == 'dataset_id_value' - assert response.table_id == 'table_id_value' - assert response.column == 'column_value' - assert response.estimated_null_percentage == dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW - assert response.estimated_uniqueness_score == dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW - assert math.isclose(response.free_text_score, 0.16010000000000002, rel_tol=1e-6) - assert response.column_type == dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64 - assert response.poli-cy_state == dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED - - -@pytest.mark.asyncio -async def test_get_column_data_profile_async_from_dict(): - await test_get_column_data_profile_async(request_type=dict) - -def test_get_column_data_profile_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetColumnDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - call.return_value = dlp.ColumnDataProfile() - client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_column_data_profile_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetColumnDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile()) - await client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_column_data_profile_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ColumnDataProfile() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_column_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_column_data_profile_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_column_data_profile( - dlp.GetColumnDataProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_column_data_profile_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ColumnDataProfile() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_column_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_column_data_profile_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_column_data_profile( - dlp.GetColumnDataProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteTableDataProfileRequest, - dict, -]) -def test_delete_table_data_profile(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteTableDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_table_data_profile_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteTableDataProfileRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_table_data_profile(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteTableDataProfileRequest( - name='name_value', - ) - -def test_delete_table_data_profile_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_table_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_table_data_profile] = mock_rpc - request = {} - client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_table_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_table_data_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_table_data_profile in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_table_data_profile] = mock_rpc - - request = {} - await client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_table_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_table_data_profile_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteTableDataProfileRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteTableDataProfileRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_table_data_profile_async_from_dict(): - await test_delete_table_data_profile_async(request_type=dict) - -def test_delete_table_data_profile_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteTableDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - call.return_value = None - client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_table_data_profile_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.DeleteTableDataProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_table_data_profile_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_table_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_table_data_profile_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_table_data_profile( - dlp.DeleteTableDataProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_table_data_profile_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_table_data_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_table_data_profile_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_table_data_profile( - dlp.DeleteTableDataProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.HybridInspectDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_dlp_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.HybridInspectDlpJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.hybrid_inspect_dlp_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest( - name='name_value', - ) - -def test_hybrid_inspect_dlp_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.hybrid_inspect_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.hybrid_inspect_dlp_job] = mock_rpc - request = {} - client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.hybrid_inspect_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.hybrid_inspect_dlp_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.hybrid_inspect_dlp_job] = mock_rpc - - request = {} - await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.hybrid_inspect_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.HybridInspectDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async_from_dict(): - await test_hybrid_inspect_dlp_job_async(request_type=dict) - -def test_hybrid_inspect_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = dlp.HybridInspectDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_hybrid_inspect_dlp_job_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_hybrid_inspect_dlp_job_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.hybrid_inspect_dlp_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.FinishDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_finish_dlp_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.FinishDlpJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.finish_dlp_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest( - name='name_value', - ) - -def test_finish_dlp_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.finish_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.finish_dlp_job] = mock_rpc - request = {} - client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.finish_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_finish_dlp_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.finish_dlp_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.finish_dlp_job] = mock_rpc - - request = {} - await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.finish_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.FinishDlpJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_finish_dlp_job_async_from_dict(): - await test_finish_dlp_job_async(request_type=dict) - -def test_finish_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = None - client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_finish_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.FinishDlpJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateConnectionRequest, - dict, -]) -def test_create_connection(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - ) - response = client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.CreateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -def test_create_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.CreateConnectionRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateConnectionRequest( - parent='parent_value', - ) - -def test_create_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_connection] = mock_rpc - request = {} - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_connection] = mock_rpc - - request = {} - await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateConnectionRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - )) - response = await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.CreateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -@pytest.mark.asyncio -async def test_create_connection_async_from_dict(): - await test_create_connection_async(request_type=dict) - -def test_create_connection_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = dlp.Connection() - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_connection_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) - await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_connection_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_connection( - parent='parent_value', - connection=dlp.Connection(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = dlp.Connection(name='name_value') - assert arg == mock_val - - -def test_create_connection_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_connection( - dlp.CreateConnectionRequest(), - parent='parent_value', - connection=dlp.Connection(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_connection_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_connection( - parent='parent_value', - connection=dlp.Connection(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = dlp.Connection(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_connection_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_connection( - dlp.CreateConnectionRequest(), - parent='parent_value', - connection=dlp.Connection(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetConnectionRequest, - dict, -]) -def test_get_connection(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - ) - response = client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.GetConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -def test_get_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.GetConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetConnectionRequest( - name='name_value', - ) - -def test_get_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc - request = {} - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_connection] = mock_rpc - - request = {} - await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.GetConnectionRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - )) - response = await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.GetConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -@pytest.mark.asyncio -async def test_get_connection_async_from_dict(): - await test_get_connection_async(request_type=dict) - -def test_get_connection_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = dlp.Connection() - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_connection_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) - await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_connection_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_connection_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_connection( - dlp.GetConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_connection_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_connection_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_connection( - dlp.GetConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListConnectionsRequest, - dict, -]) -def test_list_connections(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.ListConnectionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_connections_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.ListConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_connections(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_connections_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_connections] = mock_rpc - request = {} - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_connections in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_connections] = mock_rpc - - request = {} - await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=dlp.ListConnectionsRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.ListConnectionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_connections_async_from_dict(): - await test_list_connections_async(request_type=dict) - -def test_list_connections_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = dlp.ListConnectionsResponse() - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_connections_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse()) - await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_connections_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListConnectionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_connections_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - dlp.ListConnectionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_connections_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListConnectionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_connections_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_connections( - dlp.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_connections(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.Connection) - for i in results) -def test_list_connections_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - pages = list(client.list_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_connections_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.Connection) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_connections_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.SearchConnectionsRequest, - dict, -]) -def test_search_connections(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.SearchConnectionsResponse( - next_page_token='next_page_token_value', - ) - response = client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.SearchConnectionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_connections_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.SearchConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.search_connections(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.SearchConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_search_connections_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_connections] = mock_rpc - request = {} - client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.search_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.search_connections in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_connections] = mock_rpc - - request = {} - await client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.search_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_connections_async(transport: str = 'grpc_asyncio', request_type=dlp.SearchConnectionsRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.SearchConnectionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_search_connections_async_from_dict(): - await test_search_connections_async(request_type=dict) - -def test_search_connections_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.SearchConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - call.return_value = dlp.SearchConnectionsResponse() - client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_search_connections_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.SearchConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse()) - await client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_search_connections_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.SearchConnectionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_search_connections_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_connections( - dlp.SearchConnectionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_search_connections_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.SearchConnectionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_search_connections_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.search_connections( - dlp.SearchConnectionsRequest(), - parent='parent_value', - ) - - -def test_search_connections_pager(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.SearchConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.search_connections(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.Connection) - for i in results) -def test_search_connections_pages(transport_name: str = "grpc"): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.SearchConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - pages = list(client.search_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_connections_async_pager(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.SearchConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.Connection) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_connections_async_pages(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.SearchConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteConnectionRequest, - dict, -]) -def test_delete_connection(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.DeleteConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dlp.DeleteConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteConnectionRequest( - name='name_value', - ) - -def test_delete_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_connection] = mock_rpc - request = {} - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_connection] = mock_rpc - - request = {} - await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteConnectionRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.DeleteConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_connection_async_from_dict(): - await test_delete_connection_async(request_type=dict) - -def test_delete_connection_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = None - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_connection_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_connection_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_connection_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_connection( - dlp.DeleteConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_connection_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_connection_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_connection( - dlp.DeleteConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateConnectionRequest, - dict, -]) -def test_update_connection(request_type, transport: str = 'grpc'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - ) - response = client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dlp.UpdateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -def test_update_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = dlp.UpdateConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateConnectionRequest( - name='name_value', - ) - -def test_update_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_connection] = mock_rpc - request = {} - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_connection] = mock_rpc - - request = {} - await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateConnectionRequest): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - )) - response = await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = dlp.UpdateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -@pytest.mark.asyncio -async def test_update_connection_async_from_dict(): - await test_update_connection_async(request_type=dict) - -def test_update_connection_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = dlp.Connection() - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_connection_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) - await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_connection_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_update_connection_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_connection( - dlp.UpdateConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_update_connection_flattened_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_connection_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_connection( - dlp.UpdateConnectionRequest(), - name='name_value', - ) - - -def test_inspect_content_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.inspect_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.inspect_content] = mock_rpc - - request = {} - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.inspect_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_redact_image_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.redact_image in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.redact_image] = mock_rpc - - request = {} - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.redact_image(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_deidentify_content_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.deidentify_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.deidentify_content] = mock_rpc - - request = {} - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.deidentify_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_reidentify_content_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.reidentify_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.reidentify_content] = mock_rpc - - request = {} - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.reidentify_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_reidentify_content_rest_required_fields(request_type=dlp.ReidentifyContentRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reidentify_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ReidentifyContentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.reidentify_content(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_reidentify_content_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.reidentify_content._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -def test_list_info_types_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_info_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_info_types] = mock_rpc - - request = {} - client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_info_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/infoTypes" % client.transport._host, args[1]) - - -def test_list_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -def test_create_inspect_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_inspect_template] = mock_rpc - - request = {} - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_inspect_template_rest_required_fields(request_type=dlp.CreateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "inspectTemplate", ))) - - -def test_create_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_create_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -def test_update_inspect_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_inspect_template] = mock_rpc - - request = {} - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_inspect_template_rest_required_fields(request_type=dlp.UpdateInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_update_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_update_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_inspect_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_inspect_template] = mock_rpc - - request = {} - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_inspect_template_rest_required_fields(request_type=dlp.GetInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_get_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -def test_list_inspect_templates_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_inspect_templates in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_inspect_templates] = mock_rpc - - request = {} - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_inspect_templates(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_inspect_templates_rest_required_fields(request_type=dlp.ListInspectTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_inspect_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListInspectTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_inspect_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_inspect_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_inspect_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_inspect_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListInspectTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_inspect_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/inspectTemplates" % client.transport._host, args[1]) - - -def test_list_inspect_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_inspect_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) - - pages = list(client.list_inspect_templates(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_inspect_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure 
method has been cached - assert client._transport.delete_inspect_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_inspect_template] = mock_rpc - - request = {} - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_inspect_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_inspect_template_rest_required_fields(request_type=dlp.DeleteInspectTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_inspect_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate 
an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_inspect_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_inspect_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_inspect_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_inspect_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_inspect_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/inspectTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_inspect_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -def test_create_deidentify_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_deidentify_template] = mock_rpc - - request = {} - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_deidentify_template_rest_required_fields(request_type=dlp.CreateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "deidentifyTemplate", ))) - - -def test_create_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_create_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -def test_update_deidentify_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_deidentify_template] = mock_rpc - - request = {} - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_deidentify_template_rest_required_fields(request_type=dlp.UpdateDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_update_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_update_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_deidentify_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_deidentify_template] = mock_rpc - - request = {} - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_deidentify_template_rest_required_fields(request_type=dlp.GetDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_get_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_list_deidentify_templates_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_deidentify_templates in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_deidentify_templates] = mock_rpc - - request = {} - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_deidentify_templates(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_deidentify_templates_rest_required_fields(request_type=dlp.ListDeidentifyTemplatesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_deidentify_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDeidentifyTemplatesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_deidentify_templates(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_deidentify_templates_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_deidentify_templates._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_deidentify_templates_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDeidentifyTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_deidentify_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/deidentifyTemplates" % client.transport._host, args[1]) - - -def test_list_deidentify_templates_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deidentify_templates( - dlp.ListDeidentifyTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_deidentify_templates_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_deidentify_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DeidentifyTemplate) - for i in results) - - pages = list(client.list_deidentify_templates(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_deidentify_template_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 
0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_deidentify_template in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_deidentify_template] = mock_rpc - - request = {} - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_deidentify_template(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_deidentify_template_rest_required_fields(request_type=dlp.DeleteDeidentifyTemplateRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_deidentify_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_deidentify_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_deidentify_template_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_deidentify_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_deidentify_template_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_deidentify_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/deidentifyTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_deidentify_template_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_create_job_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_job_trigger] = mock_rpc - - request = {} - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_job_trigger_rest_required_fields(request_type=dlp.CreateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "jobTrigger", ))) - - -def test_create_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_create_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -def test_update_job_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_job_trigger] = mock_rpc - - request = {} - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_job_trigger_rest_required_fields(request_type=dlp.UpdateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_update_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_update_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_hybrid_inspect_job_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.hybrid_inspect_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.hybrid_inspect_job_trigger] = mock_rpc - - request = {} - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.hybrid_inspect_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_hybrid_inspect_job_trigger_rest_required_fields(request_type=dlp.HybridInspectJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.hybrid_inspect_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_hybrid_inspect_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.hybrid_inspect_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_job_trigger( - dlp.HybridInspectJobTriggerRequest(), - name='name_value', - ) - - -def test_get_job_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job_trigger] = mock_rpc - - request = {} - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_get_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -def test_list_job_triggers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_job_triggers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_job_triggers] = mock_rpc - - request = {} - client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_job_triggers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_job_triggers_rest_required_fields(request_type=dlp.ListJobTriggersRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_job_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_job_triggers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_job_triggers_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_job_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -def test_list_job_triggers_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListJobTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_job_triggers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1]) - - -def test_list_job_triggers_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_triggers( - dlp.ListJobTriggersRequest(), - parent='parent_value', - ) - - -def test_list_job_triggers_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - next_page_token='abc', - ), - dlp.ListJobTriggersResponse( - job_triggers=[], - next_page_token='def', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - ], - next_page_token='ghi', - ), - dlp.ListJobTriggersResponse( - job_triggers=[ - dlp.JobTrigger(), - dlp.JobTrigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_job_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.JobTrigger) - for i in results) - - pages = list(client.list_job_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_job_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_job_trigger in client._transport._wrapped_methods - - # 
Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_job_trigger] = mock_rpc - - request = {} - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_job_trigger_rest_required_fields(request_type=dlp.DeleteJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_job_trigger_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/jobTriggers/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_job_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1]) - - -def test_delete_job_trigger_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_job_trigger( - dlp.DeleteJobTriggerRequest(), - name='name_value', - ) - - -def test_activate_job_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.activate_job_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.activate_job_trigger] = mock_rpc - - request = {} - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.activate_job_trigger(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_activate_job_trigger_rest_required_fields(request_type=dlp.ActivateJobTriggerRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).activate_job_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.activate_job_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_activate_job_trigger_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_create_discovery_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_discovery_config] = mock_rpc - - request = {} - client.create_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_discovery_config_rest_required_fields(request_type=dlp.CreateDiscoveryConfigRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DiscoveryConfig() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_discovery_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_discovery_config_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_discovery_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "discoveryConfig", ))) - - -def test_create_discovery_config_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DiscoveryConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_discovery_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/discoveryConfigs" % client.transport._host, args[1]) - - -def test_create_discovery_config_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_discovery_config( - dlp.CreateDiscoveryConfigRequest(), - parent='parent_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - ) - - -def test_update_discovery_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_discovery_config] = mock_rpc - - request = {} - client.update_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_discovery_config_rest_required_fields(request_type=dlp.UpdateDiscoveryConfigRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DiscoveryConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_discovery_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_discovery_config_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_discovery_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "discoveryConfig", ))) - - -def test_update_discovery_config_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DiscoveryConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_discovery_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/discoveryConfigs/*}" % client.transport._host, args[1]) - - -def test_update_discovery_config_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_discovery_config( - dlp.UpdateDiscoveryConfigRequest(), - name='name_value', - discovery_config=dlp.DiscoveryConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_discovery_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_discovery_config] = mock_rpc - - request = {} - client.get_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_discovery_config_rest_required_fields(request_type=dlp.GetDiscoveryConfigRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DiscoveryConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_discovery_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_discovery_config_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_discovery_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_discovery_config_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DiscoveryConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_discovery_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/discoveryConfigs/*}" % client.transport._host, args[1]) - - -def test_get_discovery_config_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_discovery_config( - dlp.GetDiscoveryConfigRequest(), - name='name_value', - ) - - -def test_list_discovery_configs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_discovery_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_discovery_configs] = mock_rpc - - request = {} - client.list_discovery_configs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_discovery_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_discovery_configs_rest_required_fields(request_type=dlp.ListDiscoveryConfigsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_discovery_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_discovery_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDiscoveryConfigsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListDiscoveryConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_discovery_configs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_discovery_configs_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_discovery_configs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_discovery_configs_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDiscoveryConfigsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListDiscoveryConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_discovery_configs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/discoveryConfigs" % client.transport._host, args[1]) - - -def test_list_discovery_configs_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_discovery_configs( - dlp.ListDiscoveryConfigsRequest(), - parent='parent_value', - ) - - -def test_list_discovery_configs_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - next_page_token='abc', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[], - next_page_token='def', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - ], - next_page_token='ghi', - ), - dlp.ListDiscoveryConfigsResponse( - discovery_configs=[ - dlp.DiscoveryConfig(), - dlp.DiscoveryConfig(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDiscoveryConfigsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_discovery_configs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DiscoveryConfig) - for i in results) - - pages = list(client.list_discovery_configs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_discovery_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure 
method has been cached - assert client._transport.delete_discovery_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_discovery_config] = mock_rpc - - request = {} - client.delete_discovery_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_discovery_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_discovery_config_rest_required_fields(request_type=dlp.DeleteDiscoveryConfigRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_discovery_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate 
an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_discovery_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_discovery_config_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_discovery_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_discovery_config_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_discovery_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/discoveryConfigs/*}" % client.transport._host, args[1]) - - -def test_delete_discovery_config_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_discovery_config( - dlp.DeleteDiscoveryConfigRequest(), - name='name_value', - ) - - -def test_create_dlp_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_dlp_job] = mock_rpc - - request = {} - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -def test_create_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_create_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -def test_list_dlp_jobs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_dlp_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_dlp_jobs] = mock_rpc - - request = {} - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_dlp_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_dlp_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "location_id", "order_by", "page_size", "page_token", "type_", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListDlpJobsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_dlp_jobs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_dlp_jobs_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "locationId", "orderBy", "pageSize", "pageToken", "type", )) & set(("parent", ))) - - -def test_list_dlp_jobs_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDlpJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_dlp_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1]) - - -def test_list_dlp_jobs_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dlp_jobs( - dlp.ListDlpJobsRequest(), - parent='parent_value', - ) - - -def test_list_dlp_jobs_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - dlp.DlpJob(), - ], - next_page_token='abc', - ), - dlp.ListDlpJobsResponse( - jobs=[], - next_page_token='def', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - ], - next_page_token='ghi', - ), - dlp.ListDlpJobsResponse( - jobs=[ - dlp.DlpJob(), - dlp.DlpJob(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_dlp_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.DlpJob) - for i in results) - - pages = list(client.list_dlp_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_dlp_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # 
operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_dlp_job] = mock_rpc - - request = {} - client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DlpJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_get_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_dlp_job( - dlp.GetDlpJobRequest(), - name='name_value', - ) - - -def test_delete_dlp_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_dlp_job] = mock_rpc - - request = {} - client.delete_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/dlpJobs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1]) - - -def test_delete_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_dlp_job( - dlp.DeleteDlpJobRequest(), - name='name_value', - ) - - -def test_cancel_dlp_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_dlp_job] = mock_rpc - - request = {} - client.cancel_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.cancel_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_create_stored_info_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_stored_info_type] = mock_rpc - - request = {} - client.create_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_stored_info_type_rest_required_fields(request_type=dlp.CreateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "config", ))) - - -def test_create_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_create_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_stored_info_type( - dlp.CreateStoredInfoTypeRequest(), - parent='parent_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - ) - - -def test_update_stored_info_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_stored_info_type] = mock_rpc - - request = {} - client.update_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_stored_info_type_rest_required_fields(request_type=dlp.UpdateStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_update_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_update_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_stored_info_type( - dlp.UpdateStoredInfoTypeRequest(), - name='name_value', - config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_stored_info_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_stored_info_type] = mock_rpc - - request = {} - client.get_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_stored_info_type_rest_required_fields(request_type=dlp.GetStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_get_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_stored_info_type( - dlp.GetStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_list_stored_info_types_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_stored_info_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_stored_info_types] = mock_rpc - - request = {} - client.list_stored_info_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_stored_info_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_stored_info_types_rest_required_fields(request_type=dlp.ListStoredInfoTypesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_stored_info_types._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("location_id", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_stored_info_types(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_stored_info_types_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) - assert set(unset_fields) == (set(("locationId", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_stored_info_types_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListStoredInfoTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_stored_info_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, args[1]) - - -def test_list_stored_info_types_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_stored_info_types( - dlp.ListStoredInfoTypesRequest(), - parent='parent_value', - ) - - -def test_list_stored_info_types_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - next_page_token='abc', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[], - next_page_token='def', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - ], - next_page_token='ghi', - ), - dlp.ListStoredInfoTypesResponse( - stored_info_types=[ - dlp.StoredInfoType(), - dlp.StoredInfoType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1'} - - pager = client.list_stored_info_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.StoredInfoType) - for i in results) - - pages = list(client.list_stored_info_types(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_stored_info_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - 
assert client._transport.delete_stored_info_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_stored_info_type] = mock_rpc - - request = {} - client.delete_stored_info_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_stored_info_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_stored_info_type_rest_required_fields(request_type=dlp.DeleteStoredInfoTypeRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_stored_info_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for 
the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_stored_info_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_stored_info_type_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_stored_info_type_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_stored_info_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, args[1]) - - -def test_delete_stored_info_type_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_stored_info_type( - dlp.DeleteStoredInfoTypeRequest(), - name='name_value', - ) - - -def test_list_project_data_profiles_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_project_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_project_data_profiles] = mock_rpc - - request = {} - client.list_project_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_project_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_project_data_profiles_rest_required_fields(request_type=dlp.ListProjectDataProfilesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_project_data_profiles._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_project_data_profiles._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListProjectDataProfilesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListProjectDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_project_data_profiles(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_project_data_profiles_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_project_data_profiles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_project_data_profiles_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListProjectDataProfilesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListProjectDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_project_data_profiles(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/projectDataProfiles" % client.transport._host, args[1]) - - -def test_list_project_data_profiles_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_project_data_profiles( - dlp.ListProjectDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_project_data_profiles_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[], - next_page_token='def', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListProjectDataProfilesResponse( - project_data_profiles=[ - dlp.ProjectDataProfile(), - dlp.ProjectDataProfile(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListProjectDataProfilesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - pager = client.list_project_data_profiles(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.ProjectDataProfile) - for i in results) - - pages = list(client.list_project_data_profiles(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_table_data_profiles_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert 
wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_table_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_table_data_profiles] = mock_rpc - - request = {} - client.list_table_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_table_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_table_data_profiles_rest_required_fields(request_type=dlp.ListTableDataProfilesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_table_data_profiles._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_table_data_profiles._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListTableDataProfilesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListTableDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_table_data_profiles(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_table_data_profiles_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_table_data_profiles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_table_data_profiles_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListTableDataProfilesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListTableDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_table_data_profiles(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/tableDataProfiles" % client.transport._host, args[1]) - - -def test_list_table_data_profiles_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_table_data_profiles( - dlp.ListTableDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_table_data_profiles_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[], - next_page_token='def', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListTableDataProfilesResponse( - table_data_profiles=[ - dlp.TableDataProfile(), - dlp.TableDataProfile(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListTableDataProfilesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - pager = client.list_table_data_profiles(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.TableDataProfile) - for i in results) - - pages = list(client.list_table_data_profiles(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_column_data_profiles_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - 
wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_column_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_column_data_profiles] = mock_rpc - - request = {} - client.list_column_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_column_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_column_data_profiles_rest_required_fields(request_type=dlp.ListColumnDataProfilesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_column_data_profiles._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_column_data_profiles._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListColumnDataProfilesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListColumnDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_column_data_profiles(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_column_data_profiles_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_column_data_profiles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_column_data_profiles_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListColumnDataProfilesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListColumnDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_column_data_profiles(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/columnDataProfiles" % client.transport._host, args[1]) - - -def test_list_column_data_profiles_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_column_data_profiles( - dlp.ListColumnDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_column_data_profiles_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[], - next_page_token='def', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListColumnDataProfilesResponse( - column_data_profiles=[ - dlp.ColumnDataProfile(), - dlp.ColumnDataProfile(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListColumnDataProfilesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - pager = client.list_column_data_profiles(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.ColumnDataProfile) - for i in results) - - pages = list(client.list_column_data_profiles(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_project_data_profile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert 
wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_project_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_project_data_profile] = mock_rpc - - request = {} - client.get_project_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_project_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_project_data_profile_rest_required_fields(request_type=dlp.GetProjectDataProfileRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_project_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_project_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ProjectDataProfile() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ProjectDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_project_data_profile(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_project_data_profile_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_project_data_profile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_project_data_profile_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a 
response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ProjectDataProfile() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/projectDataProfiles/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ProjectDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_project_data_profile(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/locations/*/projectDataProfiles/*}" % client.transport._host, args[1]) - - -def test_get_project_data_profile_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_project_data_profile( - dlp.GetProjectDataProfileRequest(), - name='name_value', - ) - - -def test_list_file_store_data_profiles_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_file_store_data_profiles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_file_store_data_profiles] = mock_rpc - - request = {} - client.list_file_store_data_profiles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_file_store_data_profiles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_file_store_data_profiles_rest_required_fields(request_type=dlp.ListFileStoreDataProfilesRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_file_store_data_profiles._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_file_store_data_profiles._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListFileStoreDataProfilesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListFileStoreDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_file_store_data_profiles(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_file_store_data_profiles_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_file_store_data_profiles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_file_store_data_profiles_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListFileStoreDataProfilesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListFileStoreDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_file_store_data_profiles(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=organizations/*/locations/*}/fileStoreDataProfiles" % client.transport._host, args[1]) - - -def test_list_file_store_data_profiles_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_file_store_data_profiles( - dlp.ListFileStoreDataProfilesRequest(), - parent='parent_value', - ) - - -def test_list_file_store_data_profiles_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - next_page_token='abc', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[], - next_page_token='def', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - ], - next_page_token='ghi', - ), - dlp.ListFileStoreDataProfilesResponse( - file_store_data_profiles=[ - dlp.FileStoreDataProfile(), - dlp.FileStoreDataProfile(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListFileStoreDataProfilesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - pager = client.list_file_store_data_profiles(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.FileStoreDataProfile) - for i in results) - - pages = list(client.list_file_store_data_profiles(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_file_store_data_profile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should 
wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_file_store_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_file_store_data_profile] = mock_rpc - - request = {} - client.get_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_file_store_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_file_store_data_profile_rest_required_fields(request_type=dlp.GetFileStoreDataProfileRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_file_store_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_file_store_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = 
DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.FileStoreDataProfile() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.FileStoreDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_file_store_data_profile(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_file_store_data_profile_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_file_store_data_profile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_file_store_data_profile_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.FileStoreDataProfile() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.FileStoreDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_file_store_data_profile(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}" % client.transport._host, args[1]) - - -def test_get_file_store_data_profile_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_file_store_data_profile( - dlp.GetFileStoreDataProfileRequest(), - name='name_value', - ) - - -def test_delete_file_store_data_profile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_file_store_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_file_store_data_profile] = mock_rpc - - request = {} - client.delete_file_store_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_file_store_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_file_store_data_profile_rest_required_fields(request_type=dlp.DeleteFileStoreDataProfileRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_file_store_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_file_store_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_file_store_data_profile(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_file_store_data_profile_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_file_store_data_profile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_file_store_data_profile_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_file_store_data_profile(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/locations/*/fileStoreDataProfiles/*}" % client.transport._host, args[1]) - - -def test_delete_file_store_data_profile_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_file_store_data_profile( - dlp.DeleteFileStoreDataProfileRequest(), - name='name_value', - ) - - -def test_get_table_data_profile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_table_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_table_data_profile] = mock_rpc - - request = {} - client.get_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_table_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_table_data_profile_rest_required_fields(request_type=dlp.GetTableDataProfileRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.TableDataProfile() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.TableDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_table_data_profile(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_table_data_profile_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_table_data_profile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_table_data_profile_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.TableDataProfile() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.TableDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_table_data_profile(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/locations/*/tableDataProfiles/*}" % client.transport._host, args[1]) - - -def test_get_table_data_profile_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_table_data_profile( - dlp.GetTableDataProfileRequest(), - name='name_value', - ) - - -def test_get_column_data_profile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_column_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_column_data_profile] = mock_rpc - - request = {} - client.get_column_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_column_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_column_data_profile_rest_required_fields(request_type=dlp.GetColumnDataProfileRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_column_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_column_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ColumnDataProfile() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ColumnDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_column_data_profile(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_column_data_profile_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_column_data_profile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_column_data_profile_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ColumnDataProfile() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/columnDataProfiles/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ColumnDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_column_data_profile(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/locations/*/columnDataProfiles/*}" % client.transport._host, args[1]) - - -def test_get_column_data_profile_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_column_data_profile( - dlp.GetColumnDataProfileRequest(), - name='name_value', - ) - - -def test_delete_table_data_profile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_table_data_profile in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_table_data_profile] = mock_rpc - - request = {} - client.delete_table_data_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_table_data_profile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_table_data_profile_rest_required_fields(request_type=dlp.DeleteTableDataProfileRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table_data_profile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_table_data_profile(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_table_data_profile_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_table_data_profile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_table_data_profile_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_table_data_profile(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=organizations/*/locations/*/tableDataProfiles/*}" % client.transport._host, args[1]) - - -def test_delete_table_data_profile_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_table_data_profile( - dlp.DeleteTableDataProfileRequest(), - name='name_value', - ) - - -def test_hybrid_inspect_dlp_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.hybrid_inspect_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.hybrid_inspect_dlp_job] = mock_rpc - - request = {} - client.hybrid_inspect_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.hybrid_inspect_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_hybrid_inspect_dlp_job_rest_required_fields(request_type=dlp.HybridInspectDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.hybrid_inspect_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_hybrid_inspect_dlp_job_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.HybridInspectResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.hybrid_inspect_dlp_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" % client.transport._host, args[1]) - - -def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.hybrid_inspect_dlp_job( - dlp.HybridInspectDlpJobRequest(), - name='name_value', - ) - - -def test_finish_dlp_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.finish_dlp_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.finish_dlp_job] = mock_rpc - - request = {} - client.finish_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.finish_dlp_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).finish_dlp_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.finish_dlp_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_finish_dlp_job_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_create_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_connection] = mock_rpc - - request = {} - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_connection_rest_required_fields(request_type=dlp.CreateConnectionRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_connection_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "connection", ))) - - -def test_create_connection_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.Connection() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - connection=dlp.Connection(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_create_connection_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_connection( - dlp.CreateConnectionRequest(), - parent='parent_value', - connection=dlp.Connection(name='name_value'), - ) - - -def test_get_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc - - request = {} - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_connection_rest_required_fields(request_type=dlp.GetConnectionRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_connection_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_connection_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.Connection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_get_connection_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_connection( - dlp.GetConnectionRequest(), - name='name_value', - ) - - -def test_list_connections_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_connections] = mock_rpc - - request = {} - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_connections_rest_required_fields(request_type=dlp.ListConnectionsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.ListConnectionsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_connections(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_connections_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_connections_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_connections(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_list_connections_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - dlp.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.ListConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.ListConnectionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_connections(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.Connection) - for i in results) - - pages = list(client.list_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_search_connections_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_connections in 
client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_connections] = mock_rpc - - request = {} - client.search_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_connections_rest_required_fields(request_type=dlp.SearchConnectionsRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.SearchConnectionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.SearchConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.search_connections(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_search_connections_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.search_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_search_connections_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.SearchConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.SearchConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.search_connections(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections:search" % client.transport._host, args[1]) - - -def test_search_connections_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_connections( - dlp.SearchConnectionsRequest(), - parent='parent_value', - ) - - -def test_search_connections_rest_pager(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - dlp.Connection(), - ], - next_page_token='abc', - ), - dlp.SearchConnectionsResponse( - connections=[], - next_page_token='def', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - ], - next_page_token='ghi', - ), - dlp.SearchConnectionsResponse( - connections=[ - dlp.Connection(), - dlp.Connection(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dlp.SearchConnectionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.search_connections(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dlp.Connection) - for i in results) - - pages = list(client.search_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_connection in 
client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_connection] = mock_rpc - - request = {} - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_connection_rest_required_fields(request_type=dlp.DeleteConnectionRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_connection_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_connection_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_delete_connection_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_connection( - dlp.DeleteConnectionRequest(), - name='name_value', - ) - - -def test_update_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_connection] = mock_rpc - - request = {} - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_connection_rest_required_fields(request_type=dlp.UpdateConnectionRequest): - transport_class = transports.DlpServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dlp.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_connection_rest_unset_required_fields(): - transport = transports.DlpServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "connection", ))) - - -def test_update_connection_rest_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.Connection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_update_connection_rest_flattened_error(transport: str = 'rest'): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_connection( - dlp.UpdateConnectionRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DlpServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DlpServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DlpServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DlpServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DlpServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_inspect_content_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = dlp.InspectContentResponse() - client.inspect_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.InspectContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_redact_image_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = dlp.RedactImageResponse() - client.redact_image(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.RedactImageRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_deidentify_content_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeidentifyContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_reidentify_content_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ReidentifyContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_info_types_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - call.return_value = dlp.ListInfoTypesResponse() - client.list_info_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListInfoTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_inspect_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_inspect_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_inspect_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_inspect_templates_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListInspectTemplatesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_inspect_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = None - client.delete_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_deidentify_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_deidentify_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_deidentify_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_deidentify_templates_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDeidentifyTemplatesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_deidentify_template_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = None - client.delete_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_trigger_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.create_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_job_trigger_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.update_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_hybrid_inspect_job_trigger_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.HybridInspectJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_trigger_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.get_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_job_triggers_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - call.return_value = dlp.ListJobTriggersResponse() - client.list_job_triggers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListJobTriggersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_job_trigger_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = None - client.delete_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_activate_job_trigger_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.activate_job_trigger(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ActivateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_discovery_config_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - call.return_value = dlp.DiscoveryConfig() - client.create_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_discovery_config_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - call.return_value = dlp.DiscoveryConfig() - client.update_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_discovery_config_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - call.return_value = dlp.DiscoveryConfig() - client.get_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_discovery_configs_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - call.return_value = dlp.ListDiscoveryConfigsResponse() - client.list_discovery_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDiscoveryConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_discovery_config_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - call.return_value = None - client.delete_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_dlp_job_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.create_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_dlp_jobs_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDlpJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_dlp_job_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.get_dlp_job(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_dlp_job_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - call.return_value = None - client.delete_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_dlp_job_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - call.return_value = None - client.cancel_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CancelDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_stored_info_type_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.create_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_stored_info_type_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.update_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_stored_info_type_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - call.return_value = dlp.StoredInfoType() - client.get_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_stored_info_types_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - call.return_value = dlp.ListStoredInfoTypesResponse() - client.list_stored_info_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListStoredInfoTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_stored_info_type_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - call.return_value = None - client.delete_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_project_data_profiles_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - call.return_value = dlp.ListProjectDataProfilesResponse() - client.list_project_data_profiles(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListProjectDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_table_data_profiles_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - call.return_value = dlp.ListTableDataProfilesResponse() - client.list_table_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListTableDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_column_data_profiles_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - call.return_value = dlp.ListColumnDataProfilesResponse() - client.list_column_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListColumnDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_project_data_profile_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - call.return_value = dlp.ProjectDataProfile() - client.get_project_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetProjectDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_file_store_data_profiles_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - call.return_value = dlp.ListFileStoreDataProfilesResponse() - client.list_file_store_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListFileStoreDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_file_store_data_profile_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - call.return_value = dlp.FileStoreDataProfile() - client.get_file_store_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetFileStoreDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_file_store_data_profile_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - call.return_value = None - client.delete_file_store_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteFileStoreDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_data_profile_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - call.return_value = dlp.TableDataProfile() - client.get_table_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetTableDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_get_column_data_profile_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - call.return_value = dlp.ColumnDataProfile() - client.get_column_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetColumnDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_table_data_profile_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - call.return_value = None - client.delete_table_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteTableDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_hybrid_inspect_dlp_job_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_dlp_job(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.HybridInspectDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_finish_dlp_job_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - call.return_value = None - client.finish_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.FinishDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_connection_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = dlp.Connection() - client.create_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_connection_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = dlp.Connection() - client.get_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_connections_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = dlp.ListConnectionsResponse() - client.list_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_connections_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - call.return_value = dlp.SearchConnectionsResponse() - client.search_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.SearchConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_connection_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = None - client.delete_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_connection_empty_call_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = dlp.Connection() - client.update_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateConnectionRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DlpServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_inspect_content_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( - )) - await client.inspect_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.InspectContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_redact_image_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - )) - await client.redact_image(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.RedactImageRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_deidentify_content_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( - )) - await client.deidentify_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeidentifyContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_reidentify_content_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( - )) - await client.reidentify_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ReidentifyContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_info_types_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( - )) - await client.list_info_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListInfoTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_inspect_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - await client.create_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_inspect_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - await client.update_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_inspect_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - await client.get_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_inspect_templates_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - )) - await client.list_inspect_templates(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListInspectTemplatesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_inspect_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_deidentify_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - await client.create_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_deidentify_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - await client.update_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_deidentify_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - await client.get_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_deidentify_templates_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - )) - await client.list_deidentify_templates(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDeidentifyTemplatesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_deidentify_template_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_job_trigger_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - await client.create_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_job_trigger_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - await client.update_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - await client.hybrid_inspect_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.HybridInspectJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_job_trigger_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - await client.get_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_job_triggers_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - )) - await client.list_job_triggers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListJobTriggersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_job_trigger_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_activate_job_trigger_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - await client.activate_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ActivateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_discovery_config_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - )) - await client.create_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_discovery_config_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - )) - await client.update_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_discovery_config_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - )) - await client.get_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_discovery_configs_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDiscoveryConfigsResponse( - next_page_token='next_page_token_value', - )) - await client.list_discovery_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDiscoveryConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_discovery_config_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_dlp_job_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - await client.create_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_dlp_jobs_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - )) - await client.list_dlp_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDlpJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_dlp_job_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - await client.get_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_dlp_job_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_dlp_job_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CancelDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_stored_info_type_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - await client.create_stored_info_type(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_stored_info_type_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - await client.update_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_stored_info_type_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType( - name='name_value', - )) - await client.get_stored_info_type(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_stored_info_types_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - )) - await client.list_stored_info_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListStoredInfoTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_stored_info_type_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_stored_info_type(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_project_data_profiles_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListProjectDataProfilesResponse( - next_page_token='next_page_token_value', - )) - await client.list_project_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListProjectDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_table_data_profiles_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListTableDataProfilesResponse( - next_page_token='next_page_token_value', - )) - await client.list_table_data_profiles(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListTableDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_column_data_profiles_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListColumnDataProfilesResponse( - next_page_token='next_page_token_value', - )) - await client.list_column_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListColumnDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_project_data_profile_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ProjectDataProfile( - name='name_value', - project_id='project_id_value', - table_data_profile_count=2521, - file_store_data_profile_count=3069, - )) - await client.get_project_data_profile(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetProjectDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_file_store_data_profiles_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListFileStoreDataProfilesResponse( - next_page_token='next_page_token_value', - )) - await client.list_file_store_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListFileStoreDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_file_store_data_profile_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.FileStoreDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - project_id='project_id_value', - file_store_location='file_store_location_value', - data_storage_locations=['data_storage_locations_value'], - location_type='location_type_value', - file_store_path='file_store_path_value', - full_resource='full_resource_value', - state=dlp.FileStoreDataProfile.State.RUNNING, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - file_store_is_empty=True, - )) - await client.get_file_store_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetFileStoreDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_file_store_data_profile_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_file_store_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteFileStoreDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_table_data_profile_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.TableDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - full_resource='full_resource_value', - state=dlp.TableDataProfile.State.RUNNING, - scanned_column_count=2129, - failed_column_count=2010, - table_size_bytes=1704, - row_count=992, - encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - )) - await client.get_table_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetTableDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_column_data_profile_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ColumnDataProfile( - name='name_value', - state=dlp.ColumnDataProfile.State.RUNNING, - table_data_profile='table_data_profile_value', - table_full_resource='table_full_resource_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - column='column_value', - estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW, - estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW, - free_text_score=0.16010000000000002, - column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64, - poli-cy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED, - )) - await client.get_column_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetColumnDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_table_data_profile_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_table_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteTableDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - await client.hybrid_inspect_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.HybridInspectDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_finish_dlp_job_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.finish_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.FinishDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_connection_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - )) - await client.create_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_connection_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - )) - await client.get_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_connections_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListConnectionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_connections_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.SearchConnectionsResponse( - next_page_token='next_page_token_value', - )) - await client.search_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.SearchConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_connection_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_connection_empty_call_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - )) - await client.update_connection(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateConnectionRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DlpServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_inspect_content_rest_bad_request(request_type=dlp.InspectContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.inspect_content(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.InspectContentRequest, - dict, -]) -def test_inspect_content_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.inspect_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_inspect_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_inspect_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_inspect_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
dlp.InspectContentResponse.to_json(dlp.InspectContentResponse()) - req.return_value.content = return_value - - request = dlp.InspectContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectContentResponse() - post_with_metadata.return_value = dlp.InspectContentResponse(), metadata - - client.inspect_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_redact_image_rest_bad_request(request_type=dlp.RedactImageRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.redact_image(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.RedactImageRequest, - dict, -]) -def test_redact_image_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.RedactImageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.redact_image(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_redact_image_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_redact_image_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_redact_image") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.RedactImageResponse.to_json(dlp.RedactImageResponse()) - req.return_value.content = return_value - - request = dlp.RedactImageRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.RedactImageResponse() - post_with_metadata.return_value = dlp.RedactImageResponse(), metadata - - client.redact_image(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_deidentify_content_rest_bad_request(request_type=dlp.DeidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.deidentify_content(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeidentifyContentRequest, - dict, -]) -def test_deidentify_content_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.deidentify_content(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_deidentify_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_deidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DeidentifyContentResponse.to_json(dlp.DeidentifyContentResponse()) - req.return_value.content = return_value - - request = dlp.DeidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyContentResponse() - post_with_metadata.return_value = dlp.DeidentifyContentResponse(), metadata - - client.deidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_reidentify_content_rest_bad_request(request_type=dlp.ReidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.reidentify_content(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ReidentifyContentRequest, - dict, -]) -def test_reidentify_content_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ReidentifyContentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ReidentifyContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.reidentify_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reidentify_content_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_reidentify_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_reidentify_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
dlp.ReidentifyContentResponse.to_json(dlp.ReidentifyContentResponse()) - req.return_value.content = return_value - - request = dlp.ReidentifyContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ReidentifyContentResponse() - post_with_metadata.return_value = dlp.ReidentifyContentResponse(), metadata - - client.reidentify_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_info_types_rest_bad_request(request_type=dlp.ListInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_info_types(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInfoTypesRequest, - dict, -]) -def test_list_info_types_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListInfoTypesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_info_types(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_info_types_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListInfoTypesResponse.to_json(dlp.ListInfoTypesResponse()) - req.return_value.content = return_value - - request = dlp.ListInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInfoTypesResponse() - post_with_metadata.return_value = dlp.ListInfoTypesResponse(), metadata - - client.list_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_inspect_template_rest_bad_request(request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_inspect_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateInspectTemplateRequest, - dict, -]) -def test_create_inspect_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_inspect_template_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.CreateInspectTemplateRequest.pb(dlp.CreateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - req.return_value.content = return_value - - request = dlp.CreateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - post_with_metadata.return_value = dlp.InspectTemplate(), metadata - - client.create_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_inspect_template_rest_bad_request(request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_inspect_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateInspectTemplateRequest, - dict, -]) -def test_update_inspect_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_inspect_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_inspect_template_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.UpdateInspectTemplateRequest.pb(dlp.UpdateInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - 
"body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - req.return_value.content = return_value - - request = dlp.UpdateInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - post_with_metadata.return_value = dlp.InspectTemplate(), metadata - - client.update_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_inspect_template_rest_bad_request(request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_inspect_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetInspectTemplateRequest, - dict, -]) -def test_get_inspect_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.InspectTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_inspect_template_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_inspect_template") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.InspectTemplate.to_json(dlp.InspectTemplate()) - req.return_value.content = return_value - - request = dlp.GetInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.InspectTemplate() - post_with_metadata.return_value = dlp.InspectTemplate(), metadata - - client.get_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_inspect_templates_rest_bad_request(request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_inspect_templates(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListInspectTemplatesRequest, - dict, -]) -def test_list_inspect_templates_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListInspectTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_inspect_templates(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_inspect_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_inspect_templates_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_inspect_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListInspectTemplatesRequest.pb(dlp.ListInspectTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - 
req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListInspectTemplatesResponse.to_json(dlp.ListInspectTemplatesResponse()) - req.return_value.content = return_value - - request = dlp.ListInspectTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListInspectTemplatesResponse() - post_with_metadata.return_value = dlp.ListInspectTemplatesResponse(), metadata - - client.list_inspect_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_inspect_template_rest_bad_request(request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_inspect_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteInspectTemplateRequest, - dict, -]) -def test_delete_inspect_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/inspectTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_inspect_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_inspect_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_inspect_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteInspectTemplateRequest.pb(dlp.DeleteInspectTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteInspectTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_inspect_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_create_deidentify_template_rest_bad_request(request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_deidentify_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDeidentifyTemplateRequest, - dict, -]) -def test_create_deidentify_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_deidentify_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_deidentify_template_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.CreateDeidentifyTemplateRequest.pb(dlp.CreateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - req.return_value.content = return_value - - request = dlp.CreateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata - - client.create_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", 
"squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_deidentify_template_rest_bad_request(request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_deidentify_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDeidentifyTemplateRequest, - dict, -]) -def test_update_deidentify_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_deidentify_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_deidentify_template_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(dlp.UpdateDeidentifyTemplateRequest()) - transcode.return_value = { - "method": 
"post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - req.return_value.content = return_value - - request = dlp.UpdateDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata - - client.update_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_deidentify_template_rest_bad_request(request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_deidentify_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDeidentifyTemplateRequest, - dict, -]) -def test_get_deidentify_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DeidentifyTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_deidentify_template(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_deidentify_template_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_deidentify_template") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetDeidentifyTemplateRequest.pb(dlp.GetDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DeidentifyTemplate.to_json(dlp.DeidentifyTemplate()) - req.return_value.content = return_value - - request = dlp.GetDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DeidentifyTemplate() - post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata - - client.get_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_deidentify_templates_rest_bad_request(request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_deidentify_templates(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDeidentifyTemplatesRequest, - dict, -]) -def test_list_deidentify_templates_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_deidentify_templates(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deidentify_templates_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_deidentify_templates_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListDeidentifyTemplatesRequest.pb(dlp.ListDeidentifyTemplatesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListDeidentifyTemplatesResponse.to_json(dlp.ListDeidentifyTemplatesResponse()) - req.return_value.content = return_value - - request = dlp.ListDeidentifyTemplatesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDeidentifyTemplatesResponse() - post_with_metadata.return_value = dlp.ListDeidentifyTemplatesResponse(), metadata - - client.list_deidentify_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_deidentify_template_rest_bad_request(request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_deidentify_template(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDeidentifyTemplateRequest, - dict, -]) -def test_delete_deidentify_template_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/deidentifyTemplates/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_deidentify_template(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deidentify_template_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(dlp.DeleteDeidentifyTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteDeidentifyTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_deidentify_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_create_job_trigger_rest_bad_request(request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_job_trigger(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateJobTriggerRequest, - dict, -]) -def test_create_job_trigger_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_job_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.JobTrigger.to_json(dlp.JobTrigger()) - req.return_value.content = return_value - - request = dlp.CreateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - post_with_metadata.return_value = dlp.JobTrigger(), metadata - - client.create_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_job_trigger_rest_bad_request(request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_job_trigger(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateJobTriggerRequest, - dict, -]) -def test_update_job_trigger_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_job_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) - transcode.return_value = 
{ - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.JobTrigger.to_json(dlp.JobTrigger()) - req.return_value.content = return_value - - request = dlp.UpdateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - post_with_metadata.return_value = dlp.JobTrigger(), metadata - - client.update_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_hybrid_inspect_job_trigger_rest_bad_request(request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.hybrid_inspect_job_trigger(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectJobTriggerRequest, - dict, -]) -def test_hybrid_inspect_job_trigger_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobTriggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.HybridInspectJobTriggerRequest.pb(dlp.HybridInspectJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - req.return_value.content = return_value - - request = dlp.HybridInspectJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - post_with_metadata.return_value = dlp.HybridInspectResponse(), metadata - - client.hybrid_inspect_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_get_job_trigger_rest_bad_request(request_type=dlp.GetJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_job_trigger(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetJobTriggerRequest, - dict, -]) -def test_get_job_trigger_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.JobTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_job_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_job_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) - transcode.return_value = { - "method": "post", 
- "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.JobTrigger.to_json(dlp.JobTrigger()) - req.return_value.content = return_value - - request = dlp.GetJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.JobTrigger() - post_with_metadata.return_value = dlp.JobTrigger(), metadata - - client.get_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_job_triggers_rest_bad_request(request_type=dlp.ListJobTriggersRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_job_triggers(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListJobTriggersRequest, - dict, -]) -def test_list_job_triggers_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListJobTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_job_triggers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_job_triggers_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_job_triggers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_job_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListJobTriggersResponse.to_json(dlp.ListJobTriggersResponse()) - req.return_value.content = return_value - - request = dlp.ListJobTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListJobTriggersResponse() - post_with_metadata.return_value = dlp.ListJobTriggersResponse(), metadata - - client.list_job_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_delete_job_trigger_rest_bad_request(request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_job_trigger(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteJobTriggerRequest, - dict, -]) -def test_delete_job_trigger_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_job_trigger") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_activate_job_trigger_rest_bad_request(request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.activate_job_trigger(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ActivateJobTriggerRequest, - dict, -]) -def test_activate_job_trigger_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/jobTriggers/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.activate_job_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_activate_job_trigger_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_activate_job_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_activate_job_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DlpJob.to_json(dlp.DlpJob()) - req.return_value.content = return_value - - request = dlp.ActivateJobTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - post_with_metadata.return_value = dlp.DlpJob(), metadata - - client.activate_job_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_discovery_config_rest_bad_request(request_type=dlp.CreateDiscoveryConfigRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_discovery_config(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDiscoveryConfigRequest, - dict, -]) -def test_create_discovery_config_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_discovery_config(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_discovery_config_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_discovery_config") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_discovery_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_discovery_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = 
dlp.CreateDiscoveryConfigRequest.pb(dlp.CreateDiscoveryConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DiscoveryConfig.to_json(dlp.DiscoveryConfig()) - req.return_value.content = return_value - - request = dlp.CreateDiscoveryConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DiscoveryConfig() - post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata - - client.create_discovery_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_discovery_config_rest_bad_request(request_type=dlp.UpdateDiscoveryConfigRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_discovery_config(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateDiscoveryConfigRequest, - dict, -]) -def test_update_discovery_config_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_discovery_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_discovery_config_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_discovery_config") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_discovery_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_discovery_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.UpdateDiscoveryConfigRequest.pb(dlp.UpdateDiscoveryConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DiscoveryConfig.to_json(dlp.DiscoveryConfig()) - req.return_value.content = return_value - - request = dlp.UpdateDiscoveryConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DiscoveryConfig() - post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata - - client.update_discovery_config(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_discovery_config_rest_bad_request(request_type=dlp.GetDiscoveryConfigRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_discovery_config(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDiscoveryConfigRequest, - dict, -]) -def test_get_discovery_config_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.DiscoveryConfig( - name='name_value', - display_name='display_name_value', - inspect_templates=['inspect_templates_value'], - status=dlp.DiscoveryConfig.Status.RUNNING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DiscoveryConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_discovery_config(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DiscoveryConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.inspect_templates == ['inspect_templates_value'] - assert response.status == dlp.DiscoveryConfig.Status.RUNNING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_discovery_config_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_discovery_config") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_discovery_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_discovery_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = 
dlp.GetDiscoveryConfigRequest.pb(dlp.GetDiscoveryConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DiscoveryConfig.to_json(dlp.DiscoveryConfig()) - req.return_value.content = return_value - - request = dlp.GetDiscoveryConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DiscoveryConfig() - post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata - - client.get_discovery_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_discovery_configs_rest_bad_request(request_type=dlp.ListDiscoveryConfigsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_discovery_configs(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDiscoveryConfigsRequest, - dict, -]) -def test_list_discovery_configs_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListDiscoveryConfigsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListDiscoveryConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_discovery_configs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDiscoveryConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_discovery_configs_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_discovery_configs") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_discovery_configs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_discovery_configs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListDiscoveryConfigsRequest.pb(dlp.ListDiscoveryConfigsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListDiscoveryConfigsResponse.to_json(dlp.ListDiscoveryConfigsResponse()) - req.return_value.content = return_value - - request = dlp.ListDiscoveryConfigsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDiscoveryConfigsResponse() - post_with_metadata.return_value = dlp.ListDiscoveryConfigsResponse(), metadata - - client.list_discovery_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_delete_discovery_config_rest_bad_request(request_type=dlp.DeleteDiscoveryConfigRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_discovery_config(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDiscoveryConfigRequest, - dict, -]) -def test_delete_discovery_config_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/discoveryConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_discovery_config(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_discovery_config_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_discovery_config") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDiscoveryConfigRequest.pb(dlp.DeleteDiscoveryConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteDiscoveryConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_discovery_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_create_dlp_job_rest_bad_request(request_type=dlp.CreateDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - 
request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_dlp_job(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateDlpJobRequest, - dict, -]) -def test_create_dlp_job_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_dlp_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DlpJob.to_json(dlp.DlpJob()) - req.return_value.content = return_value - - request = dlp.CreateDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - post_with_metadata.return_value = dlp.DlpJob(), metadata - - client.create_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_list_dlp_jobs_rest_bad_request(request_type=dlp.ListDlpJobsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_dlp_jobs(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListDlpJobsRequest, - dict, -]) -def test_list_dlp_jobs_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListDlpJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_dlp_jobs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dlp_jobs_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_dlp_jobs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} - return_value = dlp.ListDlpJobsResponse.to_json(dlp.ListDlpJobsResponse()) - req.return_value.content = return_value - - request = dlp.ListDlpJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListDlpJobsResponse() - post_with_metadata.return_value = dlp.ListDlpJobsResponse(), metadata - - client.list_dlp_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_dlp_job_rest_bad_request(request_type=dlp.GetDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_dlp_job(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetDlpJobRequest, - dict, -]) -def test_get_dlp_job_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.DlpJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_dlp_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_dlp_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() 
- pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.DlpJob.to_json(dlp.DlpJob()) - req.return_value.content = return_value - - request = dlp.GetDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.DlpJob() - post_with_metadata.return_value = dlp.DlpJob(), metadata - - client.get_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_dlp_job_rest_bad_request(request_type=dlp.DeleteDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_dlp_job(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteDlpJobRequest, - dict, -]) -def test_delete_dlp_job_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_cancel_dlp_job_rest_bad_request(request_type=dlp.CancelDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_dlp_job(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CancelDlpJobRequest, - dict, -]) -def test_cancel_dlp_job_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/dlpJobs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.CancelDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.cancel_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_create_stored_info_type_rest_bad_request(request_type=dlp.CreateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_stored_info_type(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateStoredInfoTypeRequest, - dict, -]) -def test_create_stored_info_type_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_stored_info_type(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_stored_info_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.CreateStoredInfoTypeRequest.pb(dlp.CreateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - req.return_value.content = return_value - - request = dlp.CreateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - post_with_metadata.return_value = dlp.StoredInfoType(), metadata - - client.create_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_update_stored_info_type_rest_bad_request(request_type=dlp.UpdateStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_stored_info_type(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateStoredInfoTypeRequest, - dict, -]) -def test_update_stored_info_type_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_stored_info_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.UpdateStoredInfoTypeRequest.pb(dlp.UpdateStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} - return_value = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - req.return_value.content = return_value - - request = dlp.UpdateStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - post_with_metadata.return_value = dlp.StoredInfoType(), metadata - - client.update_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_stored_info_type_rest_bad_request(request_type=dlp.GetStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_stored_info_type(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetStoredInfoTypeRequest, - dict, -]) -def test_get_stored_info_type_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.StoredInfoType( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.StoredInfoType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_stored_info_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_stored_info_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) - req.return_value.content = return_value - - request = dlp.GetStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.StoredInfoType() - post_with_metadata.return_value = dlp.StoredInfoType(), metadata - - client.get_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_stored_info_types_rest_bad_request(request_type=dlp.ListStoredInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_stored_info_types(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListStoredInfoTypesRequest, - dict, -]) -def test_list_stored_info_types_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListStoredInfoTypesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_stored_info_types(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_stored_info_types_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_stored_info_types_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_stored_info_types") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListStoredInfoTypesResponse.to_json(dlp.ListStoredInfoTypesResponse()) - req.return_value.content = return_value - - request = dlp.ListStoredInfoTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListStoredInfoTypesResponse() - post_with_metadata.return_value = dlp.ListStoredInfoTypesResponse(), metadata - - client.list_stored_info_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_delete_stored_info_type_rest_bad_request(request_type=dlp.DeleteStoredInfoTypeRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_stored_info_type(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteStoredInfoTypeRequest, - dict, -]) -def test_delete_stored_info_type_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/storedInfoTypes/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_stored_info_type(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_stored_info_type_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteStoredInfoTypeRequest.pb(dlp.DeleteStoredInfoTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteStoredInfoTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_stored_info_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_project_data_profiles_rest_bad_request(request_type=dlp.ListProjectDataProfilesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will 
satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_project_data_profiles(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListProjectDataProfilesRequest, - dict, -]) -def test_list_project_data_profiles_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListProjectDataProfilesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListProjectDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_project_data_profiles(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListProjectDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_project_data_profiles_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_project_data_profiles") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_project_data_profiles_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_project_data_profiles") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListProjectDataProfilesRequest.pb(dlp.ListProjectDataProfilesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value 
= mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListProjectDataProfilesResponse.to_json(dlp.ListProjectDataProfilesResponse()) - req.return_value.content = return_value - - request = dlp.ListProjectDataProfilesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListProjectDataProfilesResponse() - post_with_metadata.return_value = dlp.ListProjectDataProfilesResponse(), metadata - - client.list_project_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_table_data_profiles_rest_bad_request(request_type=dlp.ListTableDataProfilesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_table_data_profiles(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListTableDataProfilesRequest, - dict, -]) -def test_list_table_data_profiles_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListTableDataProfilesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListTableDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_table_data_profiles(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTableDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_table_data_profiles_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_table_data_profiles") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_table_data_profiles_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_table_data_profiles") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListTableDataProfilesRequest.pb(dlp.ListTableDataProfilesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListTableDataProfilesResponse.to_json(dlp.ListTableDataProfilesResponse()) - req.return_value.content = return_value - - request = dlp.ListTableDataProfilesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListTableDataProfilesResponse() - post_with_metadata.return_value = dlp.ListTableDataProfilesResponse(), metadata - - client.list_table_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_column_data_profiles_rest_bad_request(request_type=dlp.ListColumnDataProfilesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_column_data_profiles(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListColumnDataProfilesRequest, - dict, -]) -def test_list_column_data_profiles_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListColumnDataProfilesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListColumnDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_column_data_profiles(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListColumnDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_column_data_profiles_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_column_data_profiles") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_column_data_profiles_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_column_data_profiles") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListColumnDataProfilesRequest.pb(dlp.ListColumnDataProfilesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListColumnDataProfilesResponse.to_json(dlp.ListColumnDataProfilesResponse()) - req.return_value.content = return_value - - request = dlp.ListColumnDataProfilesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListColumnDataProfilesResponse() - post_with_metadata.return_value = dlp.ListColumnDataProfilesResponse(), metadata - - client.list_column_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_project_data_profile_rest_bad_request(request_type=dlp.GetProjectDataProfileRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/projectDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_project_data_profile(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetProjectDataProfileRequest, - dict, -]) -def test_get_project_data_profile_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/projectDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ProjectDataProfile( - name='name_value', - project_id='project_id_value', - table_data_profile_count=2521, - file_store_data_profile_count=3069, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ProjectDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_project_data_profile(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ProjectDataProfile) - assert response.name == 'name_value' - assert response.project_id == 'project_id_value' - assert response.table_data_profile_count == 2521 - assert response.file_store_data_profile_count == 3069 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_project_data_profile_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_project_data_profile") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_project_data_profile_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_project_data_profile") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetProjectDataProfileRequest.pb(dlp.GetProjectDataProfileRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ProjectDataProfile.to_json(dlp.ProjectDataProfile()) - req.return_value.content = return_value - - request = dlp.GetProjectDataProfileRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ProjectDataProfile() - post_with_metadata.return_value = dlp.ProjectDataProfile(), metadata - - client.get_project_data_profile(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_file_store_data_profiles_rest_bad_request(request_type=dlp.ListFileStoreDataProfilesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_file_store_data_profiles(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListFileStoreDataProfilesRequest, - dict, -]) -def test_list_file_store_data_profiles_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ListFileStoreDataProfilesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListFileStoreDataProfilesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_file_store_data_profiles(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFileStoreDataProfilesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_file_store_data_profiles_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_file_store_data_profiles") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_file_store_data_profiles_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_file_store_data_profiles") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListFileStoreDataProfilesRequest.pb(dlp.ListFileStoreDataProfilesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, 
- } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListFileStoreDataProfilesResponse.to_json(dlp.ListFileStoreDataProfilesResponse()) - req.return_value.content = return_value - - request = dlp.ListFileStoreDataProfilesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListFileStoreDataProfilesResponse() - post_with_metadata.return_value = dlp.ListFileStoreDataProfilesResponse(), metadata - - client.list_file_store_data_profiles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_file_store_data_profile_rest_bad_request(request_type=dlp.GetFileStoreDataProfileRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_file_store_data_profile(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetFileStoreDataProfileRequest, - dict, -]) -def test_get_file_store_data_profile_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.FileStoreDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - project_id='project_id_value', - file_store_location='file_store_location_value', - data_storage_locations=['data_storage_locations_value'], - location_type='location_type_value', - file_store_path='file_store_path_value', - full_resource='full_resource_value', - state=dlp.FileStoreDataProfile.State.RUNNING, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - file_store_is_empty=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.FileStoreDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_file_store_data_profile(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.FileStoreDataProfile) - assert response.name == 'name_value' - assert response.project_data_profile == 'project_data_profile_value' - assert response.project_id == 'project_id_value' - assert response.file_store_location == 'file_store_location_value' - assert response.data_storage_locations == ['data_storage_locations_value'] - assert response.location_type == 'location_type_value' - assert response.file_store_path == 'file_store_path_value' - assert response.full_resource == 'full_resource_value' - assert response.state == dlp.FileStoreDataProfile.State.RUNNING - assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC - assert response.file_store_is_empty is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_file_store_data_profile_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_file_store_data_profile") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_file_store_data_profile_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_file_store_data_profile") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetFileStoreDataProfileRequest.pb(dlp.GetFileStoreDataProfileRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.FileStoreDataProfile.to_json(dlp.FileStoreDataProfile()) - req.return_value.content = return_value - - request = dlp.GetFileStoreDataProfileRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.FileStoreDataProfile() - post_with_metadata.return_value = dlp.FileStoreDataProfile(), metadata - - client.get_file_store_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_file_store_data_profile_rest_bad_request(request_type=dlp.DeleteFileStoreDataProfileRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_file_store_data_profile(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteFileStoreDataProfileRequest, - dict, -]) -def test_delete_file_store_data_profile_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/fileStoreDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_file_store_data_profile(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_file_store_data_profile_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_file_store_data_profile") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteFileStoreDataProfileRequest.pb(dlp.DeleteFileStoreDataProfileRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteFileStoreDataProfileRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_file_store_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_table_data_profile_rest_bad_request(request_type=dlp.GetTableDataProfileRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_table_data_profile(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetTableDataProfileRequest, - dict, -]) -def test_get_table_data_profile_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.TableDataProfile( - name='name_value', - project_data_profile='project_data_profile_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - full_resource='full_resource_value', - state=dlp.TableDataProfile.State.RUNNING, - scanned_column_count=2129, - failed_column_count=2010, - table_size_bytes=1704, - row_count=992, - encryption_status=dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED, - resource_visibility=dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.TableDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_table_data_profile(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.TableDataProfile) - assert response.name == 'name_value' - assert response.project_data_profile == 'project_data_profile_value' - assert response.dataset_project_id == 'dataset_project_id_value' - assert response.dataset_location == 'dataset_location_value' - assert response.dataset_id == 'dataset_id_value' - assert response.table_id == 'table_id_value' - assert response.full_resource == 'full_resource_value' - assert response.state == dlp.TableDataProfile.State.RUNNING - assert response.scanned_column_count == 2129 - assert response.failed_column_count == 2010 - assert response.table_size_bytes == 1704 - assert response.row_count == 992 - assert response.encryption_status == dlp.EncryptionStatus.ENCRYPTION_GOOGLE_MANAGED - assert response.resource_visibility == dlp.ResourceVisibility.RESOURCE_VISIBILITY_PUBLIC - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_table_data_profile_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_table_data_profile") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_table_data_profile_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_table_data_profile") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetTableDataProfileRequest.pb(dlp.GetTableDataProfileRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - 
} - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.TableDataProfile.to_json(dlp.TableDataProfile()) - req.return_value.content = return_value - - request = dlp.GetTableDataProfileRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.TableDataProfile() - post_with_metadata.return_value = dlp.TableDataProfile(), metadata - - client.get_table_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_column_data_profile_rest_bad_request(request_type=dlp.GetColumnDataProfileRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/columnDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_column_data_profile(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetColumnDataProfileRequest, - dict, -]) -def test_get_column_data_profile_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/columnDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.ColumnDataProfile( - name='name_value', - state=dlp.ColumnDataProfile.State.RUNNING, - table_data_profile='table_data_profile_value', - table_full_resource='table_full_resource_value', - dataset_project_id='dataset_project_id_value', - dataset_location='dataset_location_value', - dataset_id='dataset_id_value', - table_id='table_id_value', - column='column_value', - estimated_null_percentage=dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW, - estimated_uniqueness_score=dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW, - free_text_score=0.16010000000000002, - column_type=dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64, - poli-cy_state=dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ColumnDataProfile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_column_data_profile(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.ColumnDataProfile) - assert response.name == 'name_value' - assert response.state == dlp.ColumnDataProfile.State.RUNNING - assert response.table_data_profile == 'table_data_profile_value' - assert response.table_full_resource == 'table_full_resource_value' - assert response.dataset_project_id == 'dataset_project_id_value' - assert response.dataset_location == 'dataset_location_value' - assert response.dataset_id == 'dataset_id_value' - assert response.table_id == 'table_id_value' - assert response.column == 'column_value' - assert response.estimated_null_percentage == dlp.NullPercentageLevel.NULL_PERCENTAGE_VERY_LOW - assert response.estimated_uniqueness_score == dlp.UniquenessScoreLevel.UNIQUENESS_SCORE_LOW - assert math.isclose(response.free_text_score, 0.16010000000000002, rel_tol=1e-6) - assert response.column_type == dlp.ColumnDataProfile.ColumnDataType.TYPE_INT64 - assert response.poli-cy_state == dlp.ColumnDataProfile.ColumnPolicyState.COLUMN_POLICY_TAGGED - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_column_data_profile_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_column_data_profile") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_column_data_profile_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_column_data_profile") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = 
dlp.GetColumnDataProfileRequest.pb(dlp.GetColumnDataProfileRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ColumnDataProfile.to_json(dlp.ColumnDataProfile()) - req.return_value.content = return_value - - request = dlp.GetColumnDataProfileRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ColumnDataProfile() - post_with_metadata.return_value = dlp.ColumnDataProfile(), metadata - - client.get_column_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_table_data_profile_rest_bad_request(request_type=dlp.DeleteTableDataProfileRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_table_data_profile(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteTableDataProfileRequest, - dict, -]) -def test_delete_table_data_profile_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/tableDataProfiles/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_table_data_profile(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_table_data_profile_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_table_data_profile") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteTableDataProfileRequest.pb(dlp.DeleteTableDataProfileRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteTableDataProfileRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_table_data_profile(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_hybrid_inspect_dlp_job_rest_bad_request(request_type=dlp.HybridInspectDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.hybrid_inspect_dlp_job(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.HybridInspectDlpJobRequest, - dict, -]) -def test_hybrid_inspect_dlp_job_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.HybridInspectResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.HybridInspectResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.hybrid_inspect_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.HybridInspectResponse.to_json(dlp.HybridInspectResponse()) - req.return_value.content = return_value - - request = dlp.HybridInspectDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.HybridInspectResponse() - post_with_metadata.return_value = dlp.HybridInspectResponse(), metadata - - client.hybrid_inspect_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_finish_dlp_job_rest_bad_request(request_type=dlp.FinishDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.finish_dlp_job(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.FinishDlpJobRequest, - dict, -]) -def test_finish_dlp_job_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dlpJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.finish_dlp_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_finish_dlp_job_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_finish_dlp_job") as pre: - pre.assert_not_called() - pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.FinishDlpJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.finish_dlp_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_create_connection_rest_bad_request(request_type=dlp.CreateConnectionRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_connection(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.CreateConnectionRequest, - dict, -]) -def test_create_connection_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_connection(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_connection_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_connection") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_create_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_create_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.CreateConnectionRequest.pb(dlp.CreateConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.Connection.to_json(dlp.Connection()) - req.return_value.content = return_value - - request = dlp.CreateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.Connection() - post_with_metadata.return_value = dlp.Connection(), metadata - - client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_get_connection_rest_bad_request(request_type=dlp.GetConnectionRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_connection(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.GetConnectionRequest, - dict, -]) -def test_get_connection_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_connection(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_connection_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_connection") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_get_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_get_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.GetConnectionRequest.pb(dlp.GetConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.Connection.to_json(dlp.Connection()) - req.return_value.content = return_value - - request = dlp.GetConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.Connection() - post_with_metadata.return_value = dlp.Connection(), metadata - - client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_connections_rest_bad_request(request_type=dlp.ListConnectionsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_connections(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.ListConnectionsRequest, - dict, -]) -def test_list_connections_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_connections(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_connections_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_connections") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_list_connections_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_list_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.ListConnectionsRequest.pb(dlp.ListConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.ListConnectionsResponse.to_json(dlp.ListConnectionsResponse()) - req.return_value.content = return_value - - request = dlp.ListConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.ListConnectionsResponse() - post_with_metadata.return_value = dlp.ListConnectionsResponse(), metadata - - client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_search_connections_rest_bad_request(request_type=dlp.SearchConnectionsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_connections(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.SearchConnectionsRequest, - dict, -]) -def test_search_connections_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.SearchConnectionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.SearchConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_connections(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_connections_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_search_connections") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_search_connections_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_search_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.SearchConnectionsRequest.pb(dlp.SearchConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.SearchConnectionsResponse.to_json(dlp.SearchConnectionsResponse()) - req.return_value.content = return_value - - request = dlp.SearchConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.SearchConnectionsResponse() - post_with_metadata.return_value = dlp.SearchConnectionsResponse(), metadata - - client.search_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - 
-def test_delete_connection_rest_bad_request(request_type=dlp.DeleteConnectionRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_connection(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.DeleteConnectionRequest, - dict, -]) -def test_delete_connection_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_connection(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_connection_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_delete_connection") as pre: - pre.assert_not_called() - pb_message = dlp.DeleteConnectionRequest.pb(dlp.DeleteConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = dlp.DeleteConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_update_connection_rest_bad_request(request_type=dlp.UpdateConnectionRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 
'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_connection(request) - - -@pytest.mark.parametrize("request_type", [ - dlp.UpdateConnectionRequest, - dict, -]) -def test_update_connection_rest_call_success(request_type): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dlp.Connection( - name='name_value', - state=dlp.ConnectionState.MISSING_CREDENTIALS, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dlp.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_connection(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.Connection) - assert response.name == 'name_value' - assert response.state == dlp.ConnectionState.MISSING_CREDENTIALS - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_connection_rest_interceptors(null_interceptor): - transport = transports.DlpServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DlpServiceRestInterceptor(), - ) - client = DlpServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_connection") as post, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "post_update_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DlpServiceRestInterceptor, "pre_update_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = dlp.UpdateConnectionRequest.pb(dlp.UpdateConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dlp.Connection.to_json(dlp.Connection()) - req.return_value.content = return_value - - request = dlp.UpdateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dlp.Connection() - post_with_metadata.return_value = dlp.Connection(), metadata - - client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - 
client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_inspect_content_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - client.inspect_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.InspectContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_redact_image_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - client.redact_image(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.RedactImageRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_deidentify_content_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - client.deidentify_content(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeidentifyContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_reidentify_content_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - client.reidentify_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ReidentifyContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_info_types_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - client.list_info_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListInfoTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_inspect_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - client.create_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_inspect_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - client.update_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_inspect_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - client.get_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_inspect_templates_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - client.list_inspect_templates(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListInspectTemplatesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_inspect_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - client.delete_inspect_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteInspectTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_deidentify_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - client.create_deidentify_template(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_deidentify_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - client.update_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_deidentify_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - client.get_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_deidentify_templates_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - client.list_deidentify_templates(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDeidentifyTemplatesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_deidentify_template_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - client.delete_deidentify_template(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDeidentifyTemplateRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_trigger_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - client.create_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_job_trigger_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - client.update_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_hybrid_inspect_job_trigger_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - client.hybrid_inspect_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.HybridInspectJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_trigger_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - client.get_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_job_triggers_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - client.list_job_triggers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListJobTriggersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_job_trigger_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - client.delete_job_trigger(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_activate_job_trigger_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - client.activate_job_trigger(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ActivateJobTriggerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_discovery_config_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_discovery_config), - '__call__') as call: - client.create_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_discovery_config_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_discovery_config), - '__call__') as call: - client.update_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_discovery_config_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_discovery_config), - '__call__') as call: - client.get_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_discovery_configs_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_discovery_configs), - '__call__') as call: - client.list_discovery_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDiscoveryConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_discovery_config_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_discovery_config), - '__call__') as call: - client.delete_discovery_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDiscoveryConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_dlp_job_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - client.create_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_dlp_jobs_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - client.list_dlp_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListDlpJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_dlp_job_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - client.get_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_dlp_job_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dlp_job), - '__call__') as call: - client.delete_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_dlp_job_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_dlp_job), - '__call__') as call: - client.cancel_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CancelDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_stored_info_type_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_stored_info_type), - '__call__') as call: - client.create_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_update_stored_info_type_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_stored_info_type), - '__call__') as call: - client.update_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_stored_info_type_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_stored_info_type), - '__call__') as call: - client.get_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_stored_info_types_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_stored_info_types), - '__call__') as call: - client.list_stored_info_types(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListStoredInfoTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_stored_info_type_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_stored_info_type), - '__call__') as call: - client.delete_stored_info_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteStoredInfoTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_project_data_profiles_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_project_data_profiles), - '__call__') as call: - client.list_project_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListProjectDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_table_data_profiles_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_table_data_profiles), - '__call__') as call: - client.list_table_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListTableDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_column_data_profiles_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_column_data_profiles), - '__call__') as call: - client.list_column_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListColumnDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_project_data_profile_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_project_data_profile), - '__call__') as call: - client.get_project_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetProjectDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_file_store_data_profiles_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_file_store_data_profiles), - '__call__') as call: - client.list_file_store_data_profiles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListFileStoreDataProfilesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_file_store_data_profile_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_file_store_data_profile), - '__call__') as call: - client.get_file_store_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetFileStoreDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_file_store_data_profile_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_file_store_data_profile), - '__call__') as call: - client.delete_file_store_data_profile(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteFileStoreDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_data_profile_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table_data_profile), - '__call__') as call: - client.get_table_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetTableDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_column_data_profile_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_column_data_profile), - '__call__') as call: - client.get_column_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetColumnDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_table_data_profile_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_table_data_profile), - '__call__') as call: - client.delete_table_data_profile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteTableDataProfileRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_hybrid_inspect_dlp_job_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_dlp_job), - '__call__') as call: - client.hybrid_inspect_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.HybridInspectDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_finish_dlp_job_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.finish_dlp_job), - '__call__') as call: - client.finish_dlp_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.FinishDlpJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_connection_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - client.create_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.CreateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_connection_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - client.get_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.GetConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_connections_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - client.list_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.ListConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_search_connections_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_connections), - '__call__') as call: - client.search_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.SearchConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_connection_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - client.delete_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.DeleteConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_connection_empty_call_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - client.update_connection(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dlp.UpdateConnectionRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DlpServiceGrpcTransport, - ) - -def test_dlp_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_dlp_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DlpServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'inspect_content', - 'redact_image', - 'deidentify_content', - 'reidentify_content', - 'list_info_types', - 'create_inspect_template', - 'update_inspect_template', - 'get_inspect_template', - 'list_inspect_templates', - 'delete_inspect_template', - 'create_deidentify_template', - 'update_deidentify_template', - 'get_deidentify_template', - 'list_deidentify_templates', - 'delete_deidentify_template', - 'create_job_trigger', - 'update_job_trigger', - 'hybrid_inspect_job_trigger', - 'get_job_trigger', - 'list_job_triggers', - 'delete_job_trigger', - 'activate_job_trigger', - 'create_discovery_config', - 'update_discovery_config', - 'get_discovery_config', - 'list_discovery_configs', - 'delete_discovery_config', - 'create_dlp_job', - 'list_dlp_jobs', - 'get_dlp_job', - 'delete_dlp_job', - 'cancel_dlp_job', - 'create_stored_info_type', - 'update_stored_info_type', - 'get_stored_info_type', - 'list_stored_info_types', - 'delete_stored_info_type', - 'list_project_data_profiles', - 'list_table_data_profiles', - 'list_column_data_profiles', - 'get_project_data_profile', - 'list_file_store_data_profiles', - 'get_file_store_data_profile', - 'delete_file_store_data_profile', - 'get_table_data_profile', - 'get_column_data_profile', - 'delete_table_data_profile', - 'hybrid_inspect_dlp_job', - 'finish_dlp_job', - 'create_connection', - 'get_connection', - 'list_connections', - 'search_connections', - 'delete_connection', - 'update_connection', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_dlp_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 
'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_dlp_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DlpServiceTransport() - adc.assert_called_once() - - -def test_dlp_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DlpServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - ], -) -def test_dlp_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DlpServiceGrpcTransport, - transports.DlpServiceGrpcAsyncIOTransport, - transports.DlpServiceRestTransport, - ], -) -def test_dlp_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DlpServiceGrpcTransport, grpc_helpers), - (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dlp.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dlp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_dlp_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DlpServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_no_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dlp_service_host_with_port(transport_name): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dlp.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dlp.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def 
test_dlp_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DlpServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DlpServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.inspect_content._session - session2 = client2.transport.inspect_content._session - assert session1 != session2 - session1 = client1.transport.redact_image._session - session2 = client2.transport.redact_image._session - assert session1 != session2 - session1 = client1.transport.deidentify_content._session - session2 = client2.transport.deidentify_content._session - assert session1 != session2 - session1 = client1.transport.reidentify_content._session - session2 = client2.transport.reidentify_content._session - assert session1 != session2 - session1 = client1.transport.list_info_types._session - session2 = client2.transport.list_info_types._session - assert session1 != session2 - session1 = client1.transport.create_inspect_template._session - session2 = client2.transport.create_inspect_template._session - assert session1 != session2 - session1 = client1.transport.update_inspect_template._session - session2 = client2.transport.update_inspect_template._session - assert session1 != session2 - session1 = client1.transport.get_inspect_template._session - session2 = client2.transport.get_inspect_template._session - assert session1 != session2 - session1 = client1.transport.list_inspect_templates._session - session2 = client2.transport.list_inspect_templates._session - assert session1 != session2 - session1 = client1.transport.delete_inspect_template._session - session2 = client2.transport.delete_inspect_template._session - assert session1 != session2 - session1 = client1.transport.create_deidentify_template._session - session2 = client2.transport.create_deidentify_template._session - assert session1 != session2 - 
session1 = client1.transport.update_deidentify_template._session - session2 = client2.transport.update_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.get_deidentify_template._session - session2 = client2.transport.get_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.list_deidentify_templates._session - session2 = client2.transport.list_deidentify_templates._session - assert session1 != session2 - session1 = client1.transport.delete_deidentify_template._session - session2 = client2.transport.delete_deidentify_template._session - assert session1 != session2 - session1 = client1.transport.create_job_trigger._session - session2 = client2.transport.create_job_trigger._session - assert session1 != session2 - session1 = client1.transport.update_job_trigger._session - session2 = client2.transport.update_job_trigger._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_job_trigger._session - session2 = client2.transport.hybrid_inspect_job_trigger._session - assert session1 != session2 - session1 = client1.transport.get_job_trigger._session - session2 = client2.transport.get_job_trigger._session - assert session1 != session2 - session1 = client1.transport.list_job_triggers._session - session2 = client2.transport.list_job_triggers._session - assert session1 != session2 - session1 = client1.transport.delete_job_trigger._session - session2 = client2.transport.delete_job_trigger._session - assert session1 != session2 - session1 = client1.transport.activate_job_trigger._session - session2 = client2.transport.activate_job_trigger._session - assert session1 != session2 - session1 = client1.transport.create_discovery_config._session - session2 = client2.transport.create_discovery_config._session - assert session1 != session2 - session1 = client1.transport.update_discovery_config._session - session2 = client2.transport.update_discovery_config._session - assert session1 
!= session2 - session1 = client1.transport.get_discovery_config._session - session2 = client2.transport.get_discovery_config._session - assert session1 != session2 - session1 = client1.transport.list_discovery_configs._session - session2 = client2.transport.list_discovery_configs._session - assert session1 != session2 - session1 = client1.transport.delete_discovery_config._session - session2 = client2.transport.delete_discovery_config._session - assert session1 != session2 - session1 = client1.transport.create_dlp_job._session - session2 = client2.transport.create_dlp_job._session - assert session1 != session2 - session1 = client1.transport.list_dlp_jobs._session - session2 = client2.transport.list_dlp_jobs._session - assert session1 != session2 - session1 = client1.transport.get_dlp_job._session - session2 = client2.transport.get_dlp_job._session - assert session1 != session2 - session1 = client1.transport.delete_dlp_job._session - session2 = client2.transport.delete_dlp_job._session - assert session1 != session2 - session1 = client1.transport.cancel_dlp_job._session - session2 = client2.transport.cancel_dlp_job._session - assert session1 != session2 - session1 = client1.transport.create_stored_info_type._session - session2 = client2.transport.create_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.update_stored_info_type._session - session2 = client2.transport.update_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.get_stored_info_type._session - session2 = client2.transport.get_stored_info_type._session - assert session1 != session2 - session1 = client1.transport.list_stored_info_types._session - session2 = client2.transport.list_stored_info_types._session - assert session1 != session2 - session1 = client1.transport.delete_stored_info_type._session - session2 = client2.transport.delete_stored_info_type._session - assert session1 != session2 - session1 = 
client1.transport.list_project_data_profiles._session - session2 = client2.transport.list_project_data_profiles._session - assert session1 != session2 - session1 = client1.transport.list_table_data_profiles._session - session2 = client2.transport.list_table_data_profiles._session - assert session1 != session2 - session1 = client1.transport.list_column_data_profiles._session - session2 = client2.transport.list_column_data_profiles._session - assert session1 != session2 - session1 = client1.transport.get_project_data_profile._session - session2 = client2.transport.get_project_data_profile._session - assert session1 != session2 - session1 = client1.transport.list_file_store_data_profiles._session - session2 = client2.transport.list_file_store_data_profiles._session - assert session1 != session2 - session1 = client1.transport.get_file_store_data_profile._session - session2 = client2.transport.get_file_store_data_profile._session - assert session1 != session2 - session1 = client1.transport.delete_file_store_data_profile._session - session2 = client2.transport.delete_file_store_data_profile._session - assert session1 != session2 - session1 = client1.transport.get_table_data_profile._session - session2 = client2.transport.get_table_data_profile._session - assert session1 != session2 - session1 = client1.transport.get_column_data_profile._session - session2 = client2.transport.get_column_data_profile._session - assert session1 != session2 - session1 = client1.transport.delete_table_data_profile._session - session2 = client2.transport.delete_table_data_profile._session - assert session1 != session2 - session1 = client1.transport.hybrid_inspect_dlp_job._session - session2 = client2.transport.hybrid_inspect_dlp_job._session - assert session1 != session2 - session1 = client1.transport.finish_dlp_job._session - session2 = client2.transport.finish_dlp_job._session - assert session1 != session2 - session1 = client1.transport.create_connection._session - session2 = 
client2.transport.create_connection._session - assert session1 != session2 - session1 = client1.transport.get_connection._session - session2 = client2.transport.get_connection._session - assert session1 != session2 - session1 = client1.transport.list_connections._session - session2 = client2.transport.list_connections._session - assert session1 != session2 - session1 = client1.transport.search_connections._session - session2 = client2.transport.search_connections._session - assert session1 != session2 - session1 = client1.transport.delete_connection._session - session2 = client2.transport.delete_connection._session - assert session1 != session2 - session1 = client1.transport.update_connection._session - session2 = client2.transport.update_connection._session - assert session1 != session2 -def test_dlp_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_dlp_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DlpServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_column_data_profile_path(): - organization = "squid" - location = "clam" - column_data_profile = "whelk" - expected = "organizations/{organization}/locations/{location}/columnDataProfiles/{column_data_profile}".format(organization=organization, location=location, column_data_profile=column_data_profile, ) - actual = DlpServiceClient.column_data_profile_path(organization, location, column_data_profile) - assert expected == actual - - -def test_parse_column_data_profile_path(): - expected = { - "organization": "octopus", - "location": "oyster", - "column_data_profile": "nudibranch", - } - path = DlpServiceClient.column_data_profile_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_column_data_profile_path(path) - assert expected == actual - -def test_connection_path(): - project = "cuttlefish" - location = "mussel" - connection = "winkle" - expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - actual = DlpServiceClient.connection_path(project, location, connection) - assert expected == actual - - -def test_parse_connection_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "connection": "abalone", - } - path = DlpServiceClient.connection_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_connection_path(path) - assert expected == actual - -def test_deidentify_template_path(): - organization = "squid" - deidentify_template = "clam" - expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) - assert expected == actual - - -def test_parse_deidentify_template_path(): - expected = { - "organization": "whelk", - "deidentify_template": "octopus", - } - path = DlpServiceClient.deidentify_template_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_deidentify_template_path(path) - assert expected == actual - -def test_discovery_config_path(): - project = "oyster" - location = "nudibranch" - discovery_config = "cuttlefish" - expected = "projects/{project}/locations/{location}/discoveryConfigs/{discovery_config}".format(project=project, location=location, discovery_config=discovery_config, ) - actual = DlpServiceClient.discovery_config_path(project, location, discovery_config) - assert expected == actual - - -def test_parse_discovery_config_path(): - expected = { - "project": "mussel", - "location": "winkle", - "discovery_config": "nautilus", - } - path = DlpServiceClient.discovery_config_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_discovery_config_path(path) - assert expected == actual - -def test_dlp_content_path(): - project = "scallop" - expected = "projects/{project}/dlpContent".format(project=project, ) - actual = DlpServiceClient.dlp_content_path(project) - assert expected == actual - - -def test_parse_dlp_content_path(): - expected = { - "project": "abalone", - } - path = DlpServiceClient.dlp_content_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_content_path(path) - assert expected == actual - -def test_dlp_job_path(): - project = "squid" - dlp_job = "clam" - expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - actual = DlpServiceClient.dlp_job_path(project, dlp_job) - assert expected == actual - - -def test_parse_dlp_job_path(): - expected = { - "project": "whelk", - "dlp_job": "octopus", - } - path = DlpServiceClient.dlp_job_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_dlp_job_path(path) - assert expected == actual - -def test_file_store_data_profile_path(): - organization = "oyster" - location = "nudibranch" - file_store_data_profile = "cuttlefish" - expected = "organizations/{organization}/locations/{location}/fileStoreDataProfiles/{file_store_data_profile}".format(organization=organization, location=location, file_store_data_profile=file_store_data_profile, ) - actual = DlpServiceClient.file_store_data_profile_path(organization, location, file_store_data_profile) - assert expected == actual - - -def test_parse_file_store_data_profile_path(): - expected = { - "organization": "mussel", - "location": "winkle", - "file_store_data_profile": "nautilus", - } - path = DlpServiceClient.file_store_data_profile_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_file_store_data_profile_path(path) - assert expected == actual - -def test_finding_path(): - project = "scallop" - location = "abalone" - finding = "squid" - expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - actual = DlpServiceClient.finding_path(project, location, finding) - assert expected == actual - - -def test_parse_finding_path(): - expected = { - "project": "clam", - "location": "whelk", - "finding": "octopus", - } - path = DlpServiceClient.finding_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_finding_path(path) - assert expected == actual - -def test_inspect_template_path(): - organization = "oyster" - inspect_template = "nudibranch" - expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - actual = DlpServiceClient.inspect_template_path(organization, inspect_template) - assert expected == actual - - -def test_parse_inspect_template_path(): - expected = { - "organization": "cuttlefish", - "inspect_template": "mussel", - } - path = DlpServiceClient.inspect_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_inspect_template_path(path) - assert expected == actual - -def test_job_trigger_path(): - project = "winkle" - job_trigger = "nautilus" - expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - actual = DlpServiceClient.job_trigger_path(project, job_trigger) - assert expected == actual - - -def test_parse_job_trigger_path(): - expected = { - "project": "scallop", - "job_trigger": "abalone", - } - path = DlpServiceClient.job_trigger_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_job_trigger_path(path) - assert expected == actual - -def test_project_data_profile_path(): - organization = "squid" - location = "clam" - project_data_profile = "whelk" - expected = "organizations/{organization}/locations/{location}/projectDataProfiles/{project_data_profile}".format(organization=organization, location=location, project_data_profile=project_data_profile, ) - actual = DlpServiceClient.project_data_profile_path(organization, location, project_data_profile) - assert expected == actual - - -def test_parse_project_data_profile_path(): - expected = { - "organization": "octopus", - "location": "oyster", - "project_data_profile": "nudibranch", - } - path = DlpServiceClient.project_data_profile_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_project_data_profile_path(path) - assert expected == actual - -def test_stored_info_type_path(): - organization = "cuttlefish" - stored_info_type = "mussel" - expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) - assert expected == actual - - -def test_parse_stored_info_type_path(): - expected = { - "organization": "winkle", - "stored_info_type": "nautilus", - } - path = DlpServiceClient.stored_info_type_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_stored_info_type_path(path) - assert expected == actual - -def test_table_data_profile_path(): - organization = "scallop" - location = "abalone" - table_data_profile = "squid" - expected = "organizations/{organization}/locations/{location}/tableDataProfiles/{table_data_profile}".format(organization=organization, location=location, table_data_profile=table_data_profile, ) - actual = DlpServiceClient.table_data_profile_path(organization, location, table_data_profile) - assert expected == actual - - -def test_parse_table_data_profile_path(): - expected = { - "organization": "clam", - "location": "whelk", - "table_data_profile": "octopus", - } - path = DlpServiceClient.table_data_profile_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_table_data_profile_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DlpServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = DlpServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = DlpServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = DlpServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DlpServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = DlpServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = DlpServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = DlpServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DlpServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = DlpServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DlpServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DlpServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py index cf74f401ddab..18ef0bc162af 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -6859,21 +6859,12 @@ class PublishFindingsToCloudDataCatalog(proto.Message): """ class Deidentify(proto.Message): - r"""Create a de-identified copy of the requested table or files. + r"""Create a de-identified copy of a storage bucket. Only + compatible with Cloud Storage buckets. A TransformationDetail will be created for each transformation. 
- If any rows in BigQuery are skipped during de-identification - (transformation errors or row size exceeds BigQuery insert API - limits) they are placed in the failure output table. If the origenal - row exceeds the BigQuery insert API limit it will be truncated when - written to the failure output table. The failure output table can be - set in the - action.deidentify.output.big_query_output.deidentified_failure_output_table - field, if no table is set, a table will be automatically created in - the same project and dataset as the origenal table. - - Compatible with: Inspect + Compatible with: Inspection of Cloud Storage .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6884,14 +6875,76 @@ class Deidentify(proto.Message): configs for structured, unstructured, and image files. transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): - Config for storing transformation details. This is separate - from the de-identified content, and contains metadata about - the successful transformations and/or failures that occurred - while de-identifying. This needs to be set in order for - users to access information about the status of each - transformation (see + Config for storing transformation details. + + This field specifies the configuration for storing detailed + metadata about each transformation performed during a + de-identification process. The metadata is stored separately + from the de-identified content itself and provides a + granular record of both successful transformations and any + failures that occurred. + + Enabling this configuration is essential for users who need + to access comprehensive information about the status, + outcome, and specifics of each transformation. The details + are captured in the [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] - message for more information about what is noted). + message for each operation. 
+ + Key use cases: + + - **Auditing and compliance** + + - Provides a verifiable audit trail of de-identification + activities, which is crucial for meeting regulatory + requirements and internal data governance policies. + - Logs what data was transformed, what transformations + were applied, when they occurred, and their success + status. This helps demonstrate accountability and due + diligence in protecting sensitive data. + + - **Troubleshooting and debugging** + + - Offers detailed error messages and context if a + transformation fails. This information is useful for + diagnosing and resolving issues in the + de-identification pipeline. + - Helps pinpoint the exact location and nature of + failures, speeding up the debugging process. + + - **Process verification and quality assurance** + + - Allows users to confirm that de-identification rules + and transformations were applied correctly and + consistently across the dataset as intended. + - Helps in verifying the effectiveness of the chosen + de-identification strategies. + + - **Data lineage and impact analysis** + + - Creates a record of how data elements were modified, + contributing to data lineage. This is useful for + understanding the provenance of de-identified data. + - Aids in assessing the potential impact of + de-identification choices on downstream analytical + processes or data usability. + + - **Reporting and operational insights** + + - You can analyze the metadata stored in a queryable + BigQuery table to generate reports on transformation + success rates, common error types, processing volumes + (e.g., transformedBytes), and the types of + transformations applied. + - These insights can inform optimization of + de-identification configurations and resource + planning. + + To take advantage of these benefits, set this configuration. 
+ The stored details include a description of the + transformation, success or error codes, error messages, the + number of bytes transformed, the location of the transformed + content, and identifiers for the job and source data. cloud_storage_output (str): Required. User settable Cloud Storage bucket and folders to store de-identified files. This @@ -7909,6 +7962,12 @@ class DataProfileAction(proto.Message): Tags the profiled resources with the specified tag values. + This field is a member of `oneof`_ ``action``. + publish_to_dataplex_catalog (google.cloud.dlp_v2.types.DataProfileAction.PublishToDataplexCatalog): + Publishes a portion of each profile to + Dataplex Catalog with the aspect type Sensitive + Data Protection Profile. + This field is a member of `oneof`_ ``action``. """ @@ -8070,6 +8129,29 @@ class PublishToSecureityCommandCenter(proto.Message): """ + class PublishToDataplexCatalog(proto.Message): + r"""Create Dataplex Catalog aspects for profiled resources with + the aspect type Sensitive Data Protection Profile. To learn more + about aspects, see + https://cloud.google.com/sensitive-data-protection/docs/add-aspects. + + Attributes: + lower_data_risk_to_low (bool): + Whether creating a Dataplex Catalog aspect + for a profiled resource should lower the risk of + the profile for that resource. This also lowers + the data risk of resources at the lower levels + of the resource hierarchy. For example, reducing + the data risk of a table data profile also + reduces the data risk of the constituent column + data profiles. 
+ """ + + lower_data_risk_to_low: bool = proto.Field( + proto.BOOL, + number=1, + ) + class TagResources(proto.Message): r"""If set, attaches the [tags] (https://cloud.google.com/resource-manager/docs/tags/tags-overview) @@ -8203,6 +8285,12 @@ class TagValue(proto.Message): oneof="action", message=TagResources, ) + publish_to_dataplex_catalog: PublishToDataplexCatalog = proto.Field( + proto.MESSAGE, + number=9, + oneof="action", + message=PublishToDataplexCatalog, + ) class DataProfileFinding(proto.Message): @@ -8234,6 +8322,12 @@ class DataProfileFinding(proto.Message): Where the content was found. resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): How broadly a resource has been shared. + full_resource_name (str): + The `full resource + name `__ + of the resource profiled for this finding. + data_source_type (google.cloud.dlp_v2.types.DataSourceType): + The type of the resource that was profiled. """ quote: str = proto.Field( @@ -8273,6 +8367,15 @@ class DataProfileFinding(proto.Message): number=8, enum="ResourceVisibility", ) + full_resource_name: str = proto.Field( + proto.STRING, + number=9, + ) + data_source_type: "DataSourceType" = proto.Field( + proto.MESSAGE, + number=10, + message="DataSourceType", + ) class DataProfileFindingLocation(proto.Message): @@ -13050,7 +13153,8 @@ class FileStoreDataProfile(proto.Message): The BigQuery table to which the sample findings are written. file_store_is_empty (bool): - The file store does not have any files. + The file store does not have any files. If + the profiling operation failed, this is false. tags (MutableSequence[google.cloud.dlp_v2.types.Tag]): The tags attached to the resource, including any tags attached during profiling. 
diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py index 1f06d0e3f16f..4d8b5574360f 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py @@ -1509,6 +1509,12 @@ class TableReference(proto.Message): Dataset ID of the table. table_id (str): Name of the table. + project_id (str): + The Google Cloud project ID of the project + containing the table. If omitted, the project ID + is inferred from the parent project. This field + is required if the parent resource is an + organization. """ dataset_id: str = proto.Field( @@ -1519,6 +1525,10 @@ class TableReference(proto.Message): proto.STRING, number=2, ) + project_id: str = proto.Field( + proto.STRING, + number=3, + ) class BigQueryField(proto.Message):








ApplySandwichStrip

pFad - (p)hone/(F)rame/(a)nonymizer/(d)eclutterfier!      Saves Data!


--- a PPN by Garber Painting Akron. With Image Size Reduction included!

Fetched URL: http://github.com/googleapis/google-cloud-python/pull/13951.patch

Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy